VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@ 105250

Last change on this file since 105250 was 105238, checked in by vboxsync, 7 months ago

VMM/IEM: Implement vsqrtss,vsqrtsd instruction emulations, bugref:9898

1/* $Id: IEMAllInstVexMap1.cpp.h 105238 2024-07-09 13:23:02Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
54 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
55 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
56 IEM_MC_PREPARE_AVX_USAGE();
57
58 IEM_MC_LOCAL(X86YMMREG, uSrc1);
59 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
60 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
61 IEM_MC_LOCAL(X86YMMREG, uSrc2);
62 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
63 IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
64 IEM_MC_LOCAL(X86YMMREG, uDst);
65 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
66 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
67 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
68 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
69 IEM_MC_ADVANCE_RIP_AND_FINISH();
70 IEM_MC_END();
71 }
72 else
73 {
74 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
75 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
76 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
77 IEM_MC_PREPARE_AVX_USAGE();
78
79 IEM_MC_LOCAL(X86XMMREG, uDst);
80 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
81 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
82 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
83 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
84 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
85 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
86 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
87 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
88 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
89 IEM_MC_ADVANCE_RIP_AND_FINISH();
90 IEM_MC_END();
91 }
92 }
93 else
94 {
95 /*
96 * Register, memory.
97 */
98 if (pVCpu->iem.s.uVexLength)
99 {
100 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
103 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
105 IEM_MC_PREPARE_AVX_USAGE();
106
107 IEM_MC_LOCAL(X86YMMREG, uSrc2);
108 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
109 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
110 IEM_MC_LOCAL(X86YMMREG, uSrc1);
111 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
112 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
113 IEM_MC_LOCAL(X86YMMREG, uDst);
114 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
115 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
116 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
117 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
118 IEM_MC_ADVANCE_RIP_AND_FINISH();
119 IEM_MC_END();
120 }
121 else
122 {
123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
126 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
128 IEM_MC_PREPARE_AVX_USAGE();
129
130 IEM_MC_LOCAL(X86XMMREG, uDst);
131 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
132 IEM_MC_LOCAL(X86XMMREG, uSrc2);
133 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
134 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
135 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
136 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
137
138 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
139 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
140 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
141 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
142 IEM_MC_ADVANCE_RIP_AND_FINISH();
143 IEM_MC_END();
144 }
145 }
146}
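/*
 * Roughly speaking (illustrative pseudo-code, not the exact microcode semantics),
 * the two register-width paths above implement the standard VEX zero-upper rule:
 *
 *     if (VEX.L)   ymm_dst           = op(ymm_vvvv, ymm_rm);    // full 256-bit result
 *     else       { xmm_dst           = op(xmm_vvvv, xmm_rm);    // 128-bit result ...
 *                  ymm_dst[255:128]  = 0; }                     // ... with the upper half cleared
 *
 * Pending MXCSR-reported exceptions from the worker are checked after the call
 * via IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT().
 */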
147
148
149/**
150 * Common worker for scalar AVX/AVX2 instructions on the forms (addss,subss,etc.):
151 * - vxxxss xmm0, xmm1, xmm2/mem32
152 *
153 * Exceptions type 4. AVX cpuid check for 128-bit operation.
154 * Ignores VEX.L, from SDM:
155 * Software should ensure VADDSS is encoded with VEX.L=0.
156 * Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
157 * across different processor generations.
158 */
159FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
160{
161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
162 if (IEM_IS_MODRM_REG_MODE(bRm))
163 {
164 /*
165 * Register, register.
166 */
167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
168 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
169 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
170 IEM_MC_PREPARE_AVX_USAGE();
171
172 IEM_MC_LOCAL(X86XMMREG, uDst);
173 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
174 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
175 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
176 IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
177 IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
178 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
179 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
180 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
181 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
182 IEM_MC_ADVANCE_RIP_AND_FINISH();
183 IEM_MC_END();
184 }
185 else
186 {
187 /*
188 * Register, memory.
189 */
190 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
194 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
195 IEM_MC_PREPARE_AVX_USAGE();
196
197 IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
198 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
199 IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
200 IEM_MC_LOCAL(X86XMMREG, uDst);
201 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
202 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
203 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
204 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
205 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
206 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
207 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
208 IEM_MC_ADVANCE_RIP_AND_FINISH();
209 IEM_MC_END();
210 }
211}
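/*
 * For reference, instructions routed through this worker follow the usual scalar
 * single-precision merge semantics (SDM-style, illustrative), e.g. for
 * vaddss xmm1, xmm2, xmm3/m32:
 *
 *     dst[31:0]        = op(src1[31:0], src2[31:0]);   // src1 = VEX.vvvv, src2 = xmm3/mem32
 *     dst[127:32]      = src1[127:32];                 // upper source bits pass through
 *     dst[VLMAX-1:128] = 0;                            // cleared by IEM_MC_CLEAR_YREG_128_UP
 */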
212
213
214/**
215 * Common worker for scalar AVX/AVX2 instructions on the forms (addsd,subsd,etc.):
216 * - vxxxsd xmm0, xmm1, xmm2/mem64
217 *
218 * Exceptions type 4. AVX cpuid check for 128-bit operation.
219 * Ignores VEX.L, from SDM:
220 * Software should ensure VADDSD is encoded with VEX.L=0.
221 * Encoding VADDSD with VEX.L=1 may encounter unpredictable behavior
222 * across different processor generations.
223 */
224FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R64, PFNIEMAIMPLFPAVXF3U128R64, pfnU128)
225{
226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
227 if (IEM_IS_MODRM_REG_MODE(bRm))
228 {
229 /*
230 * Register, register.
231 */
232 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
233 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
234 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
235 IEM_MC_PREPARE_AVX_USAGE();
236
237 IEM_MC_LOCAL(X86XMMREG, uDst);
238 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
239 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
240 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
241 IEM_MC_ARG(PCRTFLOAT64U, pr64Src2, 2);
242 IEM_MC_REF_XREG_R64_CONST(pr64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
243 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
244 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
245 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
246 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
247 IEM_MC_ADVANCE_RIP_AND_FINISH();
248 IEM_MC_END();
249 }
250 else
251 {
252 /*
253 * Register, memory.
254 */
255 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
258 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
260 IEM_MC_PREPARE_AVX_USAGE();
261
262 IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
263 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
264 IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
265 IEM_MC_LOCAL(X86XMMREG, uDst);
266 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
267 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
268 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
269 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
270 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
271 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
272 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
273 IEM_MC_ADVANCE_RIP_AND_FINISH();
274 IEM_MC_END();
275 }
276}
277
278
279/**
280 * Common worker for AVX2 instructions on the forms:
281 * - vpxxx xmm0, xmm1, xmm2/mem128
282 * - vpxxx ymm0, ymm1, ymm2/mem256
283 *
284 * Takes function table for function w/o implicit state parameter.
285 *
286 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
287 */
288FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
289{
290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
291 if (IEM_IS_MODRM_REG_MODE(bRm))
292 {
293 /*
294 * Register, register.
295 */
296 if (pVCpu->iem.s.uVexLength)
297 {
298 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
299 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
300 IEM_MC_LOCAL(RTUINT256U, uDst);
301 IEM_MC_LOCAL(RTUINT256U, uSrc1);
302 IEM_MC_LOCAL(RTUINT256U, uSrc2);
303 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
304 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
305 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
307 IEM_MC_PREPARE_AVX_USAGE();
308 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
309 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
310 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
311 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
312 IEM_MC_ADVANCE_RIP_AND_FINISH();
313 IEM_MC_END();
314 }
315 else
316 {
317 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
318 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
319 IEM_MC_ARG(PRTUINT128U, puDst, 0);
320 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
321 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
323 IEM_MC_PREPARE_AVX_USAGE();
324 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
325 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
326 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
327 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
328 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
329 IEM_MC_ADVANCE_RIP_AND_FINISH();
330 IEM_MC_END();
331 }
332 }
333 else
334 {
335 /*
336 * Register, memory.
337 */
338 if (pVCpu->iem.s.uVexLength)
339 {
340 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
341 IEM_MC_LOCAL(RTUINT256U, uDst);
342 IEM_MC_LOCAL(RTUINT256U, uSrc1);
343 IEM_MC_LOCAL(RTUINT256U, uSrc2);
344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
345 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
346 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
347 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
348
349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
350 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
352 IEM_MC_PREPARE_AVX_USAGE();
353
354 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
355 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
356 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
357 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
358
359 IEM_MC_ADVANCE_RIP_AND_FINISH();
360 IEM_MC_END();
361 }
362 else
363 {
364 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
365 IEM_MC_LOCAL(RTUINT128U, uSrc2);
366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
367 IEM_MC_ARG(PRTUINT128U, puDst, 0);
368 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
369 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
370
371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
372 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
373 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
374 IEM_MC_PREPARE_AVX_USAGE();
375
376 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
377 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
378 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
379 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
380 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
381
382 IEM_MC_ADVANCE_RIP_AND_FINISH();
383 IEM_MC_END();
384 }
385 }
386}
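/*
 * Typical use, as seen with vunpcklps and friends further down in this file: the
 * opcode handler sets up a host/fallback implementation table and forwards here
 * (possibly via the thin _LowSrc/_HighSrc wrappers below):
 *
 *     IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 */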
387
388
389/**
390 * Common worker for AVX2 instructions on the forms:
391 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
392 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
393 *
394 * The 128-bit memory version of this instruction may elect to skip fetching the
395 * lower 64 bits of the operand. We, however, do not.
396 *
397 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
398 */
399FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
400{
401 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
402}
403
404
405/**
406 * Common worker for AVX2 instructions on the forms:
407 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
408 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
409 *
410 * The 128-bit memory version of this instruction may elect to skip fetching the
411 * higher 64 bits of the operand. We, however, do not.
412 *
413 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
414 */
415FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
416{
417 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
418}
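/*
 * The low/high distinction only matters for the memory-fetch optimization noted
 * above; both wrappers currently forward to the generic worker unchanged. As an
 * illustration of the element pattern involved, vpunpcklbw interleaves the low
 * halves of its sources (per 128-bit lane):
 *
 *     dst = { src1[0], src2[0], src1[1], src2[1], ..., src1[7], src2[7] }   // byte elements
 */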
419
420
421/**
422 * Common worker for AVX2 instructions on the forms:
423 * - vpxxx xmm0, xmm1/mem128
424 * - vpxxx ymm0, ymm1/mem256
425 *
426 * Takes function table for function w/o implicit state parameter.
427 *
428 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
429 */
430FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
431{
432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
433 if (IEM_IS_MODRM_REG_MODE(bRm))
434 {
435 /*
436 * Register, register.
437 */
438 if (pVCpu->iem.s.uVexLength)
439 {
440 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
441 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
442 IEM_MC_LOCAL(RTUINT256U, uDst);
443 IEM_MC_LOCAL(RTUINT256U, uSrc);
444 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
445 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
446 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
447 IEM_MC_PREPARE_AVX_USAGE();
448 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
449 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
450 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
451 IEM_MC_ADVANCE_RIP_AND_FINISH();
452 IEM_MC_END();
453 }
454 else
455 {
456 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
457 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
458 IEM_MC_ARG(PRTUINT128U, puDst, 0);
459 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
460 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
461 IEM_MC_PREPARE_AVX_USAGE();
462 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
463 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
464 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
465 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
466 IEM_MC_ADVANCE_RIP_AND_FINISH();
467 IEM_MC_END();
468 }
469 }
470 else
471 {
472 /*
473 * Register, memory.
474 */
475 if (pVCpu->iem.s.uVexLength)
476 {
477 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
478 IEM_MC_LOCAL(RTUINT256U, uDst);
479 IEM_MC_LOCAL(RTUINT256U, uSrc);
480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
481 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
482 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
483
484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
485 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
486 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
487 IEM_MC_PREPARE_AVX_USAGE();
488
489 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
490 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
491 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
492
493 IEM_MC_ADVANCE_RIP_AND_FINISH();
494 IEM_MC_END();
495 }
496 else
497 {
498 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
499 IEM_MC_LOCAL(RTUINT128U, uSrc);
500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
501 IEM_MC_ARG(PRTUINT128U, puDst, 0);
502 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
503
504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
505 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
506 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
507 IEM_MC_PREPARE_AVX_USAGE();
508
509 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
510 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
511 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
512 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
513
514 IEM_MC_ADVANCE_RIP_AND_FINISH();
515 IEM_MC_END();
516 }
517 }
518}
519
520
521/**
522 * Common worker for AVX/AVX2 instructions on the forms:
523 * - vpxxx xmm0, xmm1/mem128
524 * - vpxxx ymm0, ymm1/mem256
525 *
526 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
527 */
528FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx, PCIEMOPMEDIAF2, pImpl)
529{
530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
531 if (IEM_IS_MODRM_REG_MODE(bRm))
532 {
533 /*
534 * Register, register.
535 */
536 if (pVCpu->iem.s.uVexLength)
537 {
538 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
539 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
540 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
541 IEM_MC_PREPARE_AVX_USAGE();
542
543 IEM_MC_LOCAL(X86YMMREG, uSrc);
544 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
545 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
546 IEM_MC_LOCAL(X86YMMREG, uDst);
547 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
548 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
549 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
550 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
551 IEM_MC_ADVANCE_RIP_AND_FINISH();
552 IEM_MC_END();
553 }
554 else
555 {
556 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
557 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
558 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
559 IEM_MC_PREPARE_AVX_USAGE();
560
561 IEM_MC_LOCAL(X86XMMREG, uDst);
562 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
563 IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
564 IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
565 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
566 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
567 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
568 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
569 IEM_MC_ADVANCE_RIP_AND_FINISH();
570 IEM_MC_END();
571 }
572 }
573 else
574 {
575 /*
576 * Register, memory.
577 */
578 if (pVCpu->iem.s.uVexLength)
579 {
580 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
583 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
584 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
585 IEM_MC_PREPARE_AVX_USAGE();
586
587 IEM_MC_LOCAL(X86YMMREG, uSrc);
588 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
589 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
590 IEM_MC_LOCAL(X86YMMREG, uDst);
591 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
592 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
594 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
595 IEM_MC_ADVANCE_RIP_AND_FINISH();
596 IEM_MC_END();
597 }
598 else
599 {
600 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
603 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
604 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
605 IEM_MC_PREPARE_AVX_USAGE();
606
607 IEM_MC_LOCAL(X86XMMREG, uDst);
608 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
609 IEM_MC_LOCAL(X86XMMREG, uSrc);
610 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
611 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
612 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
613 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
614 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
615 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
616 IEM_MC_ADVANCE_RIP_AND_FINISH();
617 IEM_MC_END();
618 }
619 }
620}
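/*
 * Note the two-operand shape here (Vx, Wx): VEX.vvvv is unused, so
 * IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX() additionally insists on vvvv=1111b
 * and raises #UD otherwise, matching the encoding rules for instructions of the
 * vsqrtps ymm0, ymm1/mem256 kind (illustrative example of this form).
 */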
621
622
623
624/* Opcode VEX.0F 0x00 - invalid */
625/* Opcode VEX.0F 0x01 - invalid */
626/* Opcode VEX.0F 0x02 - invalid */
627/* Opcode VEX.0F 0x03 - invalid */
628/* Opcode VEX.0F 0x04 - invalid */
629/* Opcode VEX.0F 0x05 - invalid */
630/* Opcode VEX.0F 0x06 - invalid */
631/* Opcode VEX.0F 0x07 - invalid */
632/* Opcode VEX.0F 0x08 - invalid */
633/* Opcode VEX.0F 0x09 - invalid */
634/* Opcode VEX.0F 0x0a - invalid */
635
636/** Opcode VEX.0F 0x0b. */
637FNIEMOP_DEF(iemOp_vud2)
638{
639 IEMOP_MNEMONIC(vud2, "vud2");
640 IEMOP_RAISE_INVALID_OPCODE_RET();
641}
642
643/* Opcode VEX.0F 0x0c - invalid */
644/* Opcode VEX.0F 0x0d - invalid */
645/* Opcode VEX.0F 0x0e - invalid */
646/* Opcode VEX.0F 0x0f - invalid */
647
648
649/**
650 * @opcode 0x10
651 * @oppfx none
652 * @opcpuid avx
653 * @opgroup og_avx_simdfp_datamove
654 * @opxcpttype 4UA
655 * @optest op1=1 op2=2 -> op1=2
656 * @optest op1=0 op2=-22 -> op1=-22
657 */
658FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
659{
660 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
661 Assert(pVCpu->iem.s.uVexLength <= 1);
662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
663 if (IEM_IS_MODRM_REG_MODE(bRm))
664 {
665 /*
666 * Register, register.
667 */
668 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
669 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
670 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
671 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
672 if (pVCpu->iem.s.uVexLength == 0)
673 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
674 IEM_GET_MODRM_RM(pVCpu, bRm));
675 else
676 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
677 IEM_GET_MODRM_RM(pVCpu, bRm));
678 IEM_MC_ADVANCE_RIP_AND_FINISH();
679 IEM_MC_END();
680 }
681 else if (pVCpu->iem.s.uVexLength == 0)
682 {
683 /*
684 * 128-bit: Register, Memory
685 */
686 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
687 IEM_MC_LOCAL(RTUINT128U, uSrc);
688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
689
690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
691 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
692 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
693 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
694
695 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
696 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
697
698 IEM_MC_ADVANCE_RIP_AND_FINISH();
699 IEM_MC_END();
700 }
701 else
702 {
703 /*
704 * 256-bit: Register, Memory
705 */
706 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
707 IEM_MC_LOCAL(RTUINT256U, uSrc);
708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
709
710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
712 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
713 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
714
715 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
716 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
717
718 IEM_MC_ADVANCE_RIP_AND_FINISH();
719 IEM_MC_END();
720 }
721}
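/*
 * Illustrative guest encodings decoded by this handler:
 *
 *     c5 f8 10 00    vmovups xmm0, [rax]    ; VEX.128 - 16-byte unaligned load
 *     c5 fc 10 00    vmovups ymm0, [rax]    ; VEX.256 - 32-byte unaligned load
 *
 * Unlike vmovaps, no #GP is raised for misaligned operands, hence the
 * IEM_MC_FETCH_MEM_U*_NO_AC fetches above.
 */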
722
723
724/**
725 * @opcode 0x10
726 * @oppfx 0x66
727 * @opcpuid avx
728 * @opgroup og_avx_simdfp_datamove
729 * @opxcpttype 4UA
730 * @optest op1=1 op2=2 -> op1=2
731 * @optest op1=0 op2=-22 -> op1=-22
732 */
733FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
734{
735 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
736 Assert(pVCpu->iem.s.uVexLength <= 1);
737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
738 if (IEM_IS_MODRM_REG_MODE(bRm))
739 {
740 /*
741 * Register, register.
742 */
743 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
744 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
745 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
746 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
747 if (pVCpu->iem.s.uVexLength == 0)
748 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
749 IEM_GET_MODRM_RM(pVCpu, bRm));
750 else
751 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
752 IEM_GET_MODRM_RM(pVCpu, bRm));
753 IEM_MC_ADVANCE_RIP_AND_FINISH();
754 IEM_MC_END();
755 }
756 else if (pVCpu->iem.s.uVexLength == 0)
757 {
758 /*
759 * 128-bit: Register, memory.
760 */
761 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
762 IEM_MC_LOCAL(RTUINT128U, uSrc);
763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
764
765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
766 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
767 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
768 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
769
770 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
771 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
772
773 IEM_MC_ADVANCE_RIP_AND_FINISH();
774 IEM_MC_END();
775 }
776 else
777 {
778 /*
779 * 256-bit: Memory, register.
780 */
781 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
782 IEM_MC_LOCAL(RTUINT256U, uSrc);
783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
784
785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
786 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
787 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
788 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
789
790 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
791 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
792
793 IEM_MC_ADVANCE_RIP_AND_FINISH();
794 IEM_MC_END();
795 }
796}
797
798
799FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
800{
801 Assert(pVCpu->iem.s.uVexLength <= 1);
802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
803 if (IEM_IS_MODRM_REG_MODE(bRm))
804 {
805 /**
806 * @opcode 0x10
807 * @oppfx 0xf3
808 * @opcodesub 11 mr/reg
809 * @opcpuid avx
810 * @opgroup og_avx_simdfp_datamerge
811 * @opxcpttype 5
812 * @optest op1=1 op2=0 op3=2 -> op1=2
813 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
814 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
815 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
816 * @note HssHi refers to bits 127:32.
817 */
818 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
819 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
820 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
822 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
823 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
824 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
825 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
826 IEM_MC_ADVANCE_RIP_AND_FINISH();
827 IEM_MC_END();
828 }
829 else
830 {
831 /**
832 * @opdone
833 * @opcode 0x10
834 * @oppfx 0xf3
835 * @opcodesub !11 mr/reg
836 * @opcpuid avx
837 * @opgroup og_avx_simdfp_datamove
838 * @opxcpttype 5
839 * @opfunction iemOp_vmovss_Vss_Hss_Wss
840 * @optest op1=1 op2=2 -> op1=2
841 * @optest op1=0 op2=-22 -> op1=-22
842 */
843 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
844 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
845 IEM_MC_LOCAL(uint32_t, uSrc);
846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
847
848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
849 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
850 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
851 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
852
853 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
854 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
855
856 IEM_MC_ADVANCE_RIP_AND_FINISH();
857 IEM_MC_END();
858 }
859}
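/*
 * Summary of the two vmovss load forms handled above (SDM semantics, illustrative):
 *
 *     register form:  dst[31:0] = src2[31:0];  dst[127:32] = vvvv[127:32];  dst[VLMAX-1:128] = 0;
 *     memory form:    dst[31:0] = mem32;       dst[VLMAX-1:32] = 0;         (vvvv must be 1111b)
 */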
860
861
862FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
863{
864 Assert(pVCpu->iem.s.uVexLength <= 1);
865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
866 if (IEM_IS_MODRM_REG_MODE(bRm))
867 {
868 /**
869 * @opcode 0x10
870 * @oppfx 0xf2
871 * @opcodesub 11 mr/reg
872 * @opcpuid avx
873 * @opgroup og_avx_simdfp_datamerge
874 * @opxcpttype 5
875 * @optest op1=1 op2=0 op3=2 -> op1=2
876 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
877 * @optest op1=3 op2=-1 op3=0x77 ->
878 * op1=0xffffffffffffffff0000000000000077
879 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
880 */
881 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
882 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
883 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
884
885 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
886 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
887 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
888 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
889 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
890 IEM_MC_ADVANCE_RIP_AND_FINISH();
891 IEM_MC_END();
892 }
893 else
894 {
895 /**
896 * @opdone
897 * @opcode 0x10
898 * @oppfx 0xf2
899 * @opcodesub !11 mr/reg
900 * @opcpuid avx
901 * @opgroup og_avx_simdfp_datamove
902 * @opxcpttype 5
903 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
904 * @optest op1=1 op2=2 -> op1=2
905 * @optest op1=0 op2=-22 -> op1=-22
906 */
907 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
908 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
909 IEM_MC_LOCAL(uint64_t, uSrc);
910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
911
912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
913 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
914 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
915 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
916
917 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
918 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
919
920 IEM_MC_ADVANCE_RIP_AND_FINISH();
921 IEM_MC_END();
922 }
923}
924
925
926/**
927 * @opcode 0x11
928 * @oppfx none
929 * @opcpuid avx
930 * @opgroup og_avx_simdfp_datamove
931 * @opxcpttype 4UA
932 * @optest op1=1 op2=2 -> op1=2
933 * @optest op1=0 op2=-22 -> op1=-22
934 */
935FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
936{
937 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
938 Assert(pVCpu->iem.s.uVexLength <= 1);
939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
940 if (IEM_IS_MODRM_REG_MODE(bRm))
941 {
942 /*
943 * Register, register.
944 */
945 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
948 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
949 if (pVCpu->iem.s.uVexLength == 0)
950 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
951 IEM_GET_MODRM_REG(pVCpu, bRm));
952 else
953 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
954 IEM_GET_MODRM_REG(pVCpu, bRm));
955 IEM_MC_ADVANCE_RIP_AND_FINISH();
956 IEM_MC_END();
957 }
958 else if (pVCpu->iem.s.uVexLength == 0)
959 {
960 /*
961 * 128-bit: Memory, register.
962 */
963 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
964 IEM_MC_LOCAL(RTUINT128U, uSrc);
965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
966
967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
968 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
969 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
970 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
971
972 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
973 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
974
975 IEM_MC_ADVANCE_RIP_AND_FINISH();
976 IEM_MC_END();
977 }
978 else
979 {
980 /*
981 * 256-bit: Memory, register.
982 */
983 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
984 IEM_MC_LOCAL(RTUINT256U, uSrc);
985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
986
987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
988 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
989 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
990 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
991
992 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
993 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
994
995 IEM_MC_ADVANCE_RIP_AND_FINISH();
996 IEM_MC_END();
997 }
998}
999
1000
1001/**
1002 * @opcode 0x11
1003 * @oppfx 0x66
1004 * @opcpuid avx
1005 * @opgroup og_avx_simdfp_datamove
1006 * @opxcpttype 4UA
1007 * @optest op1=1 op2=2 -> op1=2
1008 * @optest op1=0 op2=-22 -> op1=-22
1009 */
1010FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
1011{
1012 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1013 Assert(pVCpu->iem.s.uVexLength <= 1);
1014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1015 if (IEM_IS_MODRM_REG_MODE(bRm))
1016 {
1017 /*
1018 * Register, register.
1019 */
1020 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1021 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1022 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1023 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1024 if (pVCpu->iem.s.uVexLength == 0)
1025 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1026 IEM_GET_MODRM_REG(pVCpu, bRm));
1027 else
1028 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1029 IEM_GET_MODRM_REG(pVCpu, bRm));
1030 IEM_MC_ADVANCE_RIP_AND_FINISH();
1031 IEM_MC_END();
1032 }
1033 else if (pVCpu->iem.s.uVexLength == 0)
1034 {
1035 /*
1036 * 128-bit: Memory, register.
1037 */
1038 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1039 IEM_MC_LOCAL(RTUINT128U, uSrc);
1040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1041
1042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1043 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1044 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1045 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1046
1047 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
1048 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1049
1050 IEM_MC_ADVANCE_RIP_AND_FINISH();
1051 IEM_MC_END();
1052 }
1053 else
1054 {
1055 /*
1056 * 256-bit: Memory, register.
1057 */
1058 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1059 IEM_MC_LOCAL(RTUINT256U, uSrc);
1060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1061
1062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1063 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1066
1067 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1068 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1069
1070 IEM_MC_ADVANCE_RIP_AND_FINISH();
1071 IEM_MC_END();
1072 }
1073}
1074
1075
1076FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
1077{
1078 Assert(pVCpu->iem.s.uVexLength <= 1);
1079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1080 if (IEM_IS_MODRM_REG_MODE(bRm))
1081 {
1082 /**
1083 * @opcode 0x11
1084 * @oppfx 0xf3
1085 * @opcodesub 11 mr/reg
1086 * @opcpuid avx
1087 * @opgroup og_avx_simdfp_datamerge
1088 * @opxcpttype 5
1089 * @optest op1=1 op2=0 op3=2 -> op1=2
1090 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
1091 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
1092 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
1093 */
1094 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1095 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1096 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1097
1098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1100 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
1101 IEM_GET_MODRM_REG(pVCpu, bRm),
1102 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
1103 IEM_MC_ADVANCE_RIP_AND_FINISH();
1104 IEM_MC_END();
1105 }
1106 else
1107 {
1108 /**
1109 * @opdone
1110 * @opcode 0x11
1111 * @oppfx 0xf3
1112 * @opcodesub !11 mr/reg
1113 * @opcpuid avx
1114 * @opgroup og_avx_simdfp_datamove
1115 * @opxcpttype 5
1116 * @opfunction iemOp_vmovss_Wss_Hss_Vss
1117 * @optest op1=1 op2=2 -> op1=2
1118 * @optest op1=0 op2=-22 -> op1=-22
1119 */
1120 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1121 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1122 IEM_MC_LOCAL(uint32_t, uSrc);
1123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1124
1125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1126 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1128 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1129
1130 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1131 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1132
1133 IEM_MC_ADVANCE_RIP_AND_FINISH();
1134 IEM_MC_END();
1135 }
1136}
1137
1138
1139FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
1140{
1141 Assert(pVCpu->iem.s.uVexLength <= 1);
1142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1143 if (IEM_IS_MODRM_REG_MODE(bRm))
1144 {
1145 /**
1146 * @opcode 0x11
1147 * @oppfx 0xf2
1148 * @opcodesub 11 mr/reg
1149 * @opcpuid avx
1150 * @opgroup og_avx_simdfp_datamerge
1151 * @opxcpttype 5
1152 * @optest op1=1 op2=0 op3=2 -> op1=2
1153 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
1154 * @optest op1=3 op2=-1 op3=0x77 ->
1155 * op1=0xffffffffffffffff0000000000000077
1156 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
1157 */
1158 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1159 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1160 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1161
1162 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1163 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1164 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1165 IEM_GET_MODRM_REG(pVCpu, bRm),
1166 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
1167 IEM_MC_ADVANCE_RIP_AND_FINISH();
1168 IEM_MC_END();
1169 }
1170 else
1171 {
1172 /**
1173 * @opdone
1174 * @opcode 0x11
1175 * @oppfx 0xf2
1176 * @opcodesub !11 mr/reg
1177 * @opcpuid avx
1178 * @opgroup og_avx_simdfp_datamove
1179 * @opxcpttype 5
1180 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
1181 * @optest op1=1 op2=2 -> op1=2
1182 * @optest op1=0 op2=-22 -> op1=-22
1183 */
1184 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1185 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1186 IEM_MC_LOCAL(uint64_t, uSrc);
1187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1188
1189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1193
1194 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1195 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1196
1197 IEM_MC_ADVANCE_RIP_AND_FINISH();
1198 IEM_MC_END();
1199 }
1200}
1201
1202
1203FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
1204{
1205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1206 if (IEM_IS_MODRM_REG_MODE(bRm))
1207 {
1208 /**
1209 * @opcode 0x12
1210 * @opcodesub 11 mr/reg
1211 * @oppfx none
1212 * @opcpuid avx
1213 * @opgroup og_avx_simdfp_datamerge
1214 * @opxcpttype 7LZ
1215 * @optest op2=0x2200220122022203
1216 * op3=0x3304330533063307
1217 * -> op1=0x22002201220222033304330533063307
1218 * @optest op2=-1 op3=-42 -> op1=-42
1219 * @note op3 and op2 are only the 8-byte high XMM register halves.
1220 */
1221 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1222 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1223 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1224
1225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1226 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1227 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1228 IEM_GET_MODRM_RM(pVCpu, bRm),
1229 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1230
1231 IEM_MC_ADVANCE_RIP_AND_FINISH();
1232 IEM_MC_END();
1233 }
1234 else
1235 {
1236 /**
1237 * @opdone
1238 * @opcode 0x12
1239 * @opcodesub !11 mr/reg
1240 * @oppfx none
1241 * @opcpuid avx
1242 * @opgroup og_avx_simdfp_datamove
1243 * @opxcpttype 5LZ
1244 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1245 * @optest op1=1 op2=0 op3=0 -> op1=0
1246 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1247 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1248 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1249 */
1250 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1251
1252 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1253 IEM_MC_LOCAL(uint64_t, uSrc);
1254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1255
1256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1257 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1258 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1259 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1260
1261 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1262 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1263 uSrc,
1264 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1265
1266 IEM_MC_ADVANCE_RIP_AND_FINISH();
1267 IEM_MC_END();
1268 }
1269}
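/*
 * Semantics of the two forms handled above (illustrative):
 *
 *     vmovhlps xmm1, xmm2, xmm3:  dst[63:0] = xmm3[127:64];  dst[127:64] = xmm2[127:64];
 *     vmovlps  xmm1, xmm2, m64:   dst[63:0] = mem64;         dst[127:64] = xmm2[127:64];
 *
 * In both cases bits VLMAX-1:128 of the destination are zeroed and VEX.L must be 0.
 */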
1270
1271
1272/**
1273 * @opcode 0x12
1274 * @opcodesub !11 mr/reg
1275 * @oppfx 0x66
1276 * @opcpuid avx
1277 * @opgroup og_avx_pcksclr_datamerge
1278 * @opxcpttype 5LZ
1279 * @optest op2=0 op3=2 -> op1=2
1280 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1281 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1282 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1283 */
1284FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1285{
1286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1287 if (IEM_IS_MODRM_MEM_MODE(bRm))
1288 {
1289 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1290
1291 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1292 IEM_MC_LOCAL(uint64_t, uSrc);
1293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1294
1295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1296 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1297 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1298 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1299
1300 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1301 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1302 uSrc,
1303 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1304
1305 IEM_MC_ADVANCE_RIP_AND_FINISH();
1306 IEM_MC_END();
1307 }
1308
1309 /**
1310 * @opdone
1311 * @opmnemonic udvex660f12m3
1312 * @opcode 0x12
1313 * @opcodesub 11 mr/reg
1314 * @oppfx 0x66
1315 * @opunused immediate
1316 * @opcpuid avx
1317 * @optest ->
1318 */
1319 else
1320 IEMOP_RAISE_INVALID_OPCODE_RET();
1321}
1322
1323
1324/**
1325 * @opcode 0x12
1326 * @oppfx 0xf3
1327 * @opcpuid avx
1328 * @opgroup og_avx_pcksclr_datamove
1329 * @opxcpttype 4
1330 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1331 * -> op1=0x00000002000000020000000100000001
1332 * @optest vex.l==1 /
1333 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1334 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1335 */
1336FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1337{
1338 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1339 Assert(pVCpu->iem.s.uVexLength <= 1);
1340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1341 if (IEM_IS_MODRM_REG_MODE(bRm))
1342 {
1343 /*
1344 * Register, register.
1345 */
1346 if (pVCpu->iem.s.uVexLength == 0)
1347 {
1348 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1349 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1350 IEM_MC_LOCAL(RTUINT128U, uSrc);
1351
1352 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1353 IEM_MC_PREPARE_AVX_USAGE();
1354
1355 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1356 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1357 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1358 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1359 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1360 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1361
1362 IEM_MC_ADVANCE_RIP_AND_FINISH();
1363 IEM_MC_END();
1364 }
1365 else
1366 {
1367 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1368 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1369 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1370 IEM_MC_PREPARE_AVX_USAGE();
1371
1372 IEM_MC_LOCAL(RTUINT256U, uSrc);
1373 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1374 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1375 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1376 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1377 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1378 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1379 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1380 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1381 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1382 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1383
1384 IEM_MC_ADVANCE_RIP_AND_FINISH();
1385 IEM_MC_END();
1386 }
1387 }
1388 else
1389 {
1390 /*
1391 * Register, memory.
1392 */
1393 if (pVCpu->iem.s.uVexLength == 0)
1394 {
1395 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1396 IEM_MC_LOCAL(RTUINT128U, uSrc);
1397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1398
1399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1400 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1401 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1402 IEM_MC_PREPARE_AVX_USAGE();
1403
1404 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1405 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1406 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1407 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1408 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1409 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1410
1411 IEM_MC_ADVANCE_RIP_AND_FINISH();
1412 IEM_MC_END();
1413 }
1414 else
1415 {
1416 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1419 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1420 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1421 IEM_MC_PREPARE_AVX_USAGE();
1422
1423 IEM_MC_LOCAL(RTUINT256U, uSrc);
1424 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1425
1426 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1427 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1428 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1429 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1430 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1431 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1432 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1433 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1434 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1435
1436 IEM_MC_ADVANCE_RIP_AND_FINISH();
1437 IEM_MC_END();
1438 }
1439 }
1440}
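/*
 * Element pattern implemented above (illustrative, per 128-bit lane):
 *
 *     dst.dword[0] = dst.dword[1] = src.dword[0];
 *     dst.dword[2] = dst.dword[3] = src.dword[2];
 *
 * i.e. the even-indexed single-precision elements are duplicated into the odd slots.
 */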
1441
1442
1443/**
1444 * @opcode 0x12
1445 * @oppfx 0xf2
1446 * @opcpuid avx
1447 * @opgroup og_avx_pcksclr_datamove
1448 * @opxcpttype 5
1449 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1450 * -> op1=0x22222222111111112222222211111111
1451 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1452 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1453 */
1454FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1455{
1456 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1458 if (IEM_IS_MODRM_REG_MODE(bRm))
1459 {
1460 /*
1461 * Register, register.
1462 */
1463 if (pVCpu->iem.s.uVexLength == 0)
1464 {
1465 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1466 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1467 IEM_MC_LOCAL(uint64_t, uSrc);
1468
1469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1470 IEM_MC_PREPARE_AVX_USAGE();
1471
1472 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1473 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1474 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1475 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1476
1477 IEM_MC_ADVANCE_RIP_AND_FINISH();
1478 IEM_MC_END();
1479 }
1480 else
1481 {
1482 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1483 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1484 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1485 IEM_MC_PREPARE_AVX_USAGE();
1486
1487 IEM_MC_LOCAL(uint64_t, uSrc1);
1488 IEM_MC_LOCAL(uint64_t, uSrc2);
1489 IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1490 IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);
1491
1492 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
1493 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
1494 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
1495 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
1496 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1497
1498 IEM_MC_ADVANCE_RIP_AND_FINISH();
1499 IEM_MC_END();
1500 }
1501 }
1502 else
1503 {
1504 /*
1505 * Register, memory.
1506 */
1507 if (pVCpu->iem.s.uVexLength == 0)
1508 {
1509 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1511 IEM_MC_LOCAL(uint64_t, uSrc);
1512
1513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1514 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1515 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1516 IEM_MC_PREPARE_AVX_USAGE();
1517
1518 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1519 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1520 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1521 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1522
1523 IEM_MC_ADVANCE_RIP_AND_FINISH();
1524 IEM_MC_END();
1525 }
1526 else
1527 {
1528 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1530
1531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1532 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1533 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1534 IEM_MC_PREPARE_AVX_USAGE();
1535
1536 IEM_MC_LOCAL(RTUINT256U, uSrc);
1537 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1538
1539 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1540 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1541 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1542 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1543 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1544
1545 IEM_MC_ADVANCE_RIP_AND_FINISH();
1546 IEM_MC_END();
1547 }
1548 }
1549}
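/*
 * Element pattern implemented above (illustrative):
 *
 *     VEX.128:  dst.qword[0] = dst.qword[1] = src.qword[0];
 *     VEX.256:  additionally dst.qword[2] = dst.qword[3] = src.qword[2];
 */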
1550
1551
1552/**
1553 * @opcode 0x13
1554 * @opcodesub !11 mr/reg
1555 * @oppfx none
1556 * @opcpuid avx
1557 * @opgroup og_avx_simdfp_datamove
1558 * @opxcpttype 5
1559 * @optest op1=1 op2=2 -> op1=2
1560 * @optest op1=0 op2=-42 -> op1=-42
1561 */
1562FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1563{
1564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1565 if (IEM_IS_MODRM_MEM_MODE(bRm))
1566 {
1567 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1568
1569 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1570 IEM_MC_LOCAL(uint64_t, uSrc);
1571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1572
1573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1574 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1575 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1576 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1577
1578 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1579 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1580
1581 IEM_MC_ADVANCE_RIP_AND_FINISH();
1582 IEM_MC_END();
1583 }
1584
1585 /**
1586 * @opdone
1587 * @opmnemonic udvex0f13m3
1588 * @opcode 0x13
1589 * @opcodesub 11 mr/reg
1590 * @oppfx none
1591 * @opunused immediate
1592 * @opcpuid avx
1593 * @optest ->
1594 */
1595 else
1596 IEMOP_RAISE_INVALID_OPCODE_RET();
1597}
1598
1599
1600/**
1601 * @opcode 0x13
1602 * @opcodesub !11 mr/reg
1603 * @oppfx 0x66
1604 * @opcpuid avx
1605 * @opgroup og_avx_pcksclr_datamove
1606 * @opxcpttype 5
1607 * @optest op1=1 op2=2 -> op1=2
1608 * @optest op1=0 op2=-42 -> op1=-42
1609 */
1610FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1611{
1612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1613 if (IEM_IS_MODRM_MEM_MODE(bRm))
1614 {
1615 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1617 IEM_MC_LOCAL(uint64_t, uSrc);
1618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1619
1620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1621 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1624
1625 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1626 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1627
1628 IEM_MC_ADVANCE_RIP_AND_FINISH();
1629 IEM_MC_END();
1630 }
1631
1632 /**
1633 * @opdone
1634 * @opmnemonic udvex660f13m3
1635 * @opcode 0x13
1636 * @opcodesub 11 mr/reg
1637 * @oppfx 0x66
1638 * @opunused immediate
1639 * @opcpuid avx
1640 * @optest ->
1641 */
1642 else
1643 IEMOP_RAISE_INVALID_OPCODE_RET();
1644}
1645
1646/* Opcode VEX.F3.0F 0x13 - invalid */
1647/* Opcode VEX.F2.0F 0x13 - invalid */
1648
1649/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1650FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1651{
1652 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1653 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1654 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1655}
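/*
 * Element pattern for vunpcklps (illustrative, per 128-bit lane):
 *
 *     dst = { src1.dword[0], src2.dword[0], src1.dword[1], src2.dword[1] };
 */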
1656
1657
1658/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1659FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1660{
1661 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1662 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1663 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1664}
1665
1666
1667/* Opcode VEX.F3.0F 0x14 - invalid */
1668/* Opcode VEX.F2.0F 0x14 - invalid */
1669
1670
1671/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1672FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1673{
1674 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1675 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1676 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1677}
1678
1679
1680/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1681FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1682{
1683 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1684 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1685 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1686}
1687
1688
1689/* Opcode VEX.F3.0F 0x15 - invalid */
1690/* Opcode VEX.F2.0F 0x15 - invalid */
1691
1692
1693FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1694{
1695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1696 if (IEM_IS_MODRM_REG_MODE(bRm))
1697 {
1698 /**
1699 * @opcode 0x16
1700 * @opcodesub 11 mr/reg
1701 * @oppfx none
1702 * @opcpuid avx
1703 * @opgroup og_avx_simdfp_datamerge
1704 * @opxcpttype 7LZ
1705 */
1706 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1707
1708 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1709 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1710
1711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1712 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
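 /* vmovlhps: the low quadword of the VVVV source stays in the low half, the low
    quadword of the ModRM.rm register becomes the high half, and bits 255:128 of
    the destination are zeroed (VLMAX zero-extension). */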
1713 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1714 IEM_GET_MODRM_RM(pVCpu, bRm),
1715 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1716
1717 IEM_MC_ADVANCE_RIP_AND_FINISH();
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 /**
1723 * @opdone
1724 * @opcode 0x16
1725 * @opcodesub !11 mr/reg
1726 * @oppfx none
1727 * @opcpuid avx
1728 * @opgroup og_avx_simdfp_datamove
1729 * @opxcpttype 5LZ
1730 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1731 */
1732 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1733
1734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1735 IEM_MC_LOCAL(uint64_t, uSrc);
1736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1737
1738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1739 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1741 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1742
1743 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
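 /* vmovhps: the 64-bit memory operand becomes the high quadword of the result,
    the low quadword is taken from the VVVV source, and bits 255:128 are zeroed. */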
1744 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1745 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1746 uSrc);
1747
1748 IEM_MC_ADVANCE_RIP_AND_FINISH();
1749 IEM_MC_END();
1750 }
1751}
1752
1753
1754/**
1755 * @opcode 0x16
1756 * @opcodesub !11 mr/reg
1757 * @oppfx 0x66
1758 * @opcpuid avx
1759 * @opgroup og_avx_pcksclr_datamerge
1760 * @opxcpttype 5LZ
1761 */
1762FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1763{
1764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1765 if (IEM_IS_MODRM_MEM_MODE(bRm))
1766 {
1767 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1768
1769 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1770 IEM_MC_LOCAL(uint64_t, uSrc);
1771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1772
1773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1774 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1775 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1776 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1777
1778 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1779 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1780 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1781 uSrc);
1782
1783 IEM_MC_ADVANCE_RIP_AND_FINISH();
1784 IEM_MC_END();
1785 }
1786
1787 /**
1788 * @opdone
1789 * @opmnemonic udvex660f16m3
1790 * @opcode 0x16
1791 * @opcodesub 11 mr/reg
1792 * @oppfx 0x66
1793 * @opunused immediate
1794 * @opcpuid avx
1795 * @optest ->
1796 */
1797 else
1798 IEMOP_RAISE_INVALID_OPCODE_RET();
1799}
1800
1801
1802/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1803/**
1804 * @opcode 0x16
1805 * @oppfx 0xf3
1806 * @opcpuid avx
1807 * @opgroup og_avx_pcksclr_datamove
1808 * @opxcpttype 4
1809 */
1810FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1811{
1812 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1813 Assert(pVCpu->iem.s.uVexLength <= 1);
1814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1815 if (IEM_IS_MODRM_REG_MODE(bRm))
1816 {
1817 /*
1818 * Register, register.
1819 */
1820 if (pVCpu->iem.s.uVexLength == 0)
1821 {
1822 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1823 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1824 IEM_MC_LOCAL(RTUINT128U, uSrc);
1825
1826 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1827 IEM_MC_PREPARE_AVX_USAGE();
1828
1829 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
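 /* vmovshdup duplicates the odd-indexed singles: result dwords = { s[1], s[1], s[3], s[3] }. */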
1830 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1831 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1832 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1833 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1834 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1835
1836 IEM_MC_ADVANCE_RIP_AND_FINISH();
1837 IEM_MC_END();
1838 }
1839 else
1840 {
1841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1842 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1844 IEM_MC_PREPARE_AVX_USAGE();
1845
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
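 /* Same odd-element duplication across the full YMM register:
    { s[1], s[1], s[3], s[3], s[5], s[5], s[7], s[7] }. */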
1848 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1849 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1850 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1851 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1852 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1853 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1854 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1855 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1856 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1857
1858 IEM_MC_ADVANCE_RIP_AND_FINISH();
1859 IEM_MC_END();
1860 }
1861 }
1862 else
1863 {
1864 /*
1865 * Register, memory.
1866 */
1867 if (pVCpu->iem.s.uVexLength == 0)
1868 {
1869 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1870 IEM_MC_LOCAL(RTUINT128U, uSrc);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872
1873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1874 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1875 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1876 IEM_MC_PREPARE_AVX_USAGE();
1877
1878 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1879 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1880 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1881 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1882 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1883 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1884
1885 IEM_MC_ADVANCE_RIP_AND_FINISH();
1886 IEM_MC_END();
1887 }
1888 else
1889 {
1890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1893 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1895 IEM_MC_PREPARE_AVX_USAGE();
1896
1897 IEM_MC_LOCAL(RTUINT256U, uSrc);
1898 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1899
1900 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1901 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1902 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1903 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1904 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1905 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1906 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1907 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1908 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1909
1910 IEM_MC_ADVANCE_RIP_AND_FINISH();
1911 IEM_MC_END();
1912 }
1913 }
1914}
1915
1916
1917/* Opcode VEX.F2.0F 0x16 - invalid */
1918
1919
1920/**
1921 * @opcode 0x17
1922 * @opcodesub !11 mr/reg
1923 * @oppfx none
1924 * @opcpuid avx
1925 * @opgroup og_avx_simdfp_datamove
1926 * @opxcpttype 5
1927 */
1928FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1929{
1930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1931 if (IEM_IS_MODRM_MEM_MODE(bRm))
1932 {
1933 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1934
1935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1936 IEM_MC_LOCAL(uint64_t, uSrc);
1937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1938
1939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1940 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1941 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1942 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1943
1944 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1945 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1946
1947 IEM_MC_ADVANCE_RIP_AND_FINISH();
1948 IEM_MC_END();
1949 }
1950
1951 /**
1952 * @opdone
1953 * @opmnemonic udvex0f17m3
1954 * @opcode 0x17
1955 * @opcodesub 11 mr/reg
1956 * @oppfx none
1957 * @opunused immediate
1958 * @opcpuid avx
1959 * @optest ->
1960 */
1961 else
1962 IEMOP_RAISE_INVALID_OPCODE_RET();
1963}
1964
1965
1966/**
1967 * @opcode 0x17
1968 * @opcodesub !11 mr/reg
1969 * @oppfx 0x66
1970 * @opcpuid avx
1971 * @opgroup og_avx_pcksclr_datamove
1972 * @opxcpttype 5
1973 */
1974FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1975{
1976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1977 if (IEM_IS_MODRM_MEM_MODE(bRm))
1978 {
1979 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1980
1981 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1982 IEM_MC_LOCAL(uint64_t, uSrc);
1983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1984
1985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1986 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1988 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1989
1990 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1991 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1992
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 }
1996
1997 /**
1998 * @opdone
1999 * @opmnemonic udvex660f17m3
2000 * @opcode 0x17
2001 * @opcodesub 11 mr/reg
2002 * @oppfx 0x66
2003 * @opunused immediate
2004 * @opcpuid avx
2005 * @optest ->
2006 */
2007 else
2008 IEMOP_RAISE_INVALID_OPCODE_RET();
2009}
2010
2011
2012/* Opcode VEX.F3.0F 0x17 - invalid */
2013/* Opcode VEX.F2.0F 0x17 - invalid */
2014
2015
2016/* Opcode VEX.0F 0x18 - invalid */
2017/* Opcode VEX.0F 0x19 - invalid */
2018/* Opcode VEX.0F 0x1a - invalid */
2019/* Opcode VEX.0F 0x1b - invalid */
2020/* Opcode VEX.0F 0x1c - invalid */
2021/* Opcode VEX.0F 0x1d - invalid */
2022/* Opcode VEX.0F 0x1e - invalid */
2023/* Opcode VEX.0F 0x1f - invalid */
2024
2025/* Opcode VEX.0F 0x20 - invalid */
2026/* Opcode VEX.0F 0x21 - invalid */
2027/* Opcode VEX.0F 0x22 - invalid */
2028/* Opcode VEX.0F 0x23 - invalid */
2029/* Opcode VEX.0F 0x24 - invalid */
2030/* Opcode VEX.0F 0x25 - invalid */
2031/* Opcode VEX.0F 0x26 - invalid */
2032/* Opcode VEX.0F 0x27 - invalid */
2033
2034/**
2035 * @opcode 0x28
2036 * @oppfx none
2037 * @opcpuid avx
2038 * @opgroup og_avx_pcksclr_datamove
2039 * @opxcpttype 1
2040 * @optest op1=1 op2=2 -> op1=2
2041 * @optest op1=0 op2=-42 -> op1=-42
2042 * @note Almost identical to vmovapd.
2043 */
2044FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2045{
2046 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2048 Assert(pVCpu->iem.s.uVexLength <= 1);
2049 if (IEM_IS_MODRM_REG_MODE(bRm))
2050 {
2051 /*
2052 * Register, register.
2053 */
2054 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2055 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2056
2057 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2058 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
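 /* VEX-encoded moves always zero the destination bits above the operand size,
    hence the ZX_VLMAX copy helpers for both the 128-bit and 256-bit forms. */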
2059 if (pVCpu->iem.s.uVexLength == 0)
2060 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2061 IEM_GET_MODRM_RM(pVCpu, bRm));
2062 else
2063 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2064 IEM_GET_MODRM_RM(pVCpu, bRm));
2065 IEM_MC_ADVANCE_RIP_AND_FINISH();
2066 IEM_MC_END();
2067 }
2068 else
2069 {
2070 /*
2071 * Register, memory.
2072 */
2073 if (pVCpu->iem.s.uVexLength == 0)
2074 {
2075 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_LOCAL(RTUINT128U, uSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2083
2084 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090 else
2091 {
2092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2094 IEM_MC_LOCAL(RTUINT256U, uSrc);
2095
2096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2097 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2100
2101 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2102 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2103
2104 IEM_MC_ADVANCE_RIP_AND_FINISH();
2105 IEM_MC_END();
2106 }
2107 }
2108}
2109
2110
2111/**
2112 * @opcode 0x28
2113 * @oppfx 66
2114 * @opcpuid avx
2115 * @opgroup og_avx_pcksclr_datamove
2116 * @opxcpttype 1
2117 * @optest op1=1 op2=2 -> op1=2
2118 * @optest op1=0 op2=-42 -> op1=-42
2119 * @note Almost identical to vmovaps
2120 */
2121FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2122{
2123 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2125 Assert(pVCpu->iem.s.uVexLength <= 1);
2126 if (IEM_IS_MODRM_REG_MODE(bRm))
2127 {
2128 /*
2129 * Register, register.
2130 */
2131 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2132 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2133
2134 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2136 if (pVCpu->iem.s.uVexLength == 0)
2137 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2138 IEM_GET_MODRM_RM(pVCpu, bRm));
2139 else
2140 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2141 IEM_GET_MODRM_RM(pVCpu, bRm));
2142 IEM_MC_ADVANCE_RIP_AND_FINISH();
2143 IEM_MC_END();
2144 }
2145 else
2146 {
2147 /*
2148 * Register, memory.
2149 */
2150 if (pVCpu->iem.s.uVexLength == 0)
2151 {
2152 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2154 IEM_MC_LOCAL(RTUINT128U, uSrc);
2155
2156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2157 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2158 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2160
2161 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2162 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2163
2164 IEM_MC_ADVANCE_RIP_AND_FINISH();
2165 IEM_MC_END();
2166 }
2167 else
2168 {
2169 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171 IEM_MC_LOCAL(RTUINT256U, uSrc);
2172
2173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2174 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2175 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2176 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2177
2178 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2179 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 }
2185}
2186
2187/**
2188 * @opmnemonic udvexf30f28
2189 * @opcode 0x28
2190 * @oppfx 0xf3
2191 * @opunused vex.modrm
2192 * @opcpuid avx
2193 * @optest ->
2194 * @opdone
2195 */
2196
2197/**
2198 * @opmnemonic udvexf20f28
2199 * @opcode 0x28
2200 * @oppfx 0xf2
2201 * @opunused vex.modrm
2202 * @opcpuid avx
2203 * @optest ->
2204 * @opdone
2205 */
2206
2207/**
2208 * @opcode 0x29
2209 * @oppfx none
2210 * @opcpuid avx
2211 * @opgroup og_avx_pcksclr_datamove
2212 * @opxcpttype 1
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 * @note Almost identical to vmovapd.
2216 */
2217FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2218{
2219 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 if (IEM_IS_MODRM_REG_MODE(bRm))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2228 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2229
2230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2232 if (pVCpu->iem.s.uVexLength == 0)
2233 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2234 IEM_GET_MODRM_REG(pVCpu, bRm));
2235 else
2236 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2237 IEM_GET_MODRM_REG(pVCpu, bRm));
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 }
2241 else
2242 {
2243 /*
2244 * Register, memory.
2245 */
2246 if (pVCpu->iem.s.uVexLength == 0)
2247 {
2248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250 IEM_MC_LOCAL(RTUINT128U, uSrc);
2251
2252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2253 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2256
2257 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2258 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2259
2260 IEM_MC_ADVANCE_RIP_AND_FINISH();
2261 IEM_MC_END();
2262 }
2263 else
2264 {
2265 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2267 IEM_MC_LOCAL(RTUINT256U, uSrc);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2270 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2271 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2273
2274 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2275 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2276
2277 IEM_MC_ADVANCE_RIP_AND_FINISH();
2278 IEM_MC_END();
2279 }
2280 }
2281}
2282
2283/**
2284 * @opcode 0x29
2285 * @oppfx 66
2286 * @opcpuid avx
2287 * @opgroup og_avx_pcksclr_datamove
2288 * @opxcpttype 1
2289 * @optest op1=1 op2=2 -> op1=2
2290 * @optest op1=0 op2=-42 -> op1=-42
2291 * @note Almost identical to vmovaps
2292 */
2293FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2294{
2295 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2296 Assert(pVCpu->iem.s.uVexLength <= 1);
2297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2298 if (IEM_IS_MODRM_REG_MODE(bRm))
2299 {
2300 /*
2301 * Register, register.
2302 */
2303 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2305
2306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2307 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2308 if (pVCpu->iem.s.uVexLength == 0)
2309 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2310 IEM_GET_MODRM_REG(pVCpu, bRm));
2311 else
2312 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2313 IEM_GET_MODRM_REG(pVCpu, bRm));
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 if (pVCpu->iem.s.uVexLength == 0)
2323 {
2324 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2326 IEM_MC_LOCAL(RTUINT128U, uSrc);
2327
2328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2332
2333 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2334 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2335
2336 IEM_MC_ADVANCE_RIP_AND_FINISH();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343 IEM_MC_LOCAL(RTUINT256U, uSrc);
2344
2345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2346 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2349
2350 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2351 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2352
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 }
2356 }
2357}
2358
2359
2360/**
2361 * @opmnemonic udvexf30f29
2362 * @opcode 0x29
2363 * @oppfx 0xf3
2364 * @opunused vex.modrm
2365 * @opcpuid avx
2366 * @optest ->
2367 * @opdone
2368 */
2369
2370/**
2371 * @opmnemonic udvexf20f29
2372 * @opcode 0x29
2373 * @oppfx 0xf2
2374 * @opunused vex.modrm
2375 * @opcpuid avx
2376 * @optest ->
2377 * @opdone
2378 */
2379
2380
2381/** Opcode VEX.0F 0x2a - invalid */
2382/** Opcode VEX.66.0F 0x2a - invalid */
2383/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2384FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2385/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2386FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2387
2388
2389/**
2390 * @opcode 0x2b
2391 * @opcodesub !11 mr/reg
2392 * @oppfx none
2393 * @opcpuid avx
2394 * @opgroup og_avx_cachect
2395 * @opxcpttype 1
2396 * @optest op1=1 op2=2 -> op1=2
2397 * @optest op1=0 op2=-42 -> op1=-42
2398 * @note Identical implementation to vmovntpd
2399 */
2400FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2401{
2402 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2403 Assert(pVCpu->iem.s.uVexLength <= 1);
2404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2405 if (IEM_IS_MODRM_MEM_MODE(bRm))
2406 {
2407 /*
2408 * memory, register.
2409 */
2410 if (pVCpu->iem.s.uVexLength == 0)
2411 {
2412 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2413 IEM_MC_LOCAL(RTUINT128U, uSrc);
2414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2415
2416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2417 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2418 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2419 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2420
2421 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2422 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2423
2424 IEM_MC_ADVANCE_RIP_AND_FINISH();
2425 IEM_MC_END();
2426 }
2427 else
2428 {
2429 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2430 IEM_MC_LOCAL(RTUINT256U, uSrc);
2431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2432
2433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2434 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2435 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2436 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2437
2438 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2439 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2440
2441 IEM_MC_ADVANCE_RIP_AND_FINISH();
2442 IEM_MC_END();
2443 }
2444 }
2445 /* The register, register encoding is invalid. */
2446 else
2447 IEMOP_RAISE_INVALID_OPCODE_RET();
2448}
2449
2450/**
2451 * @opcode 0x2b
2452 * @opcodesub !11 mr/reg
2453 * @oppfx 0x66
2454 * @opcpuid avx
2455 * @opgroup og_avx_cachect
2456 * @opxcpttype 1
2457 * @optest op1=1 op2=2 -> op1=2
2458 * @optest op1=0 op2=-42 -> op1=-42
2459 * @note Identical implementation to vmovntps
2460 */
2461FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2462{
2463 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2464 Assert(pVCpu->iem.s.uVexLength <= 1);
2465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2466 if (IEM_IS_MODRM_MEM_MODE(bRm))
2467 {
2468 /*
2469 * memory, register.
2470 */
2471 if (pVCpu->iem.s.uVexLength == 0)
2472 {
2473 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2474 IEM_MC_LOCAL(RTUINT128U, uSrc);
2475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2476
2477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2478 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2479 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2480 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2481
2482 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2483 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2484
2485 IEM_MC_ADVANCE_RIP_AND_FINISH();
2486 IEM_MC_END();
2487 }
2488 else
2489 {
2490 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2491 IEM_MC_LOCAL(RTUINT256U, uSrc);
2492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2493
2494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2495 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2496 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2497 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2498
2499 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2500 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2501
2502 IEM_MC_ADVANCE_RIP_AND_FINISH();
2503 IEM_MC_END();
2504 }
2505 }
2506 /* The register, register encoding is invalid. */
2507 else
2508 IEMOP_RAISE_INVALID_OPCODE_RET();
2509}
2510
2511/**
2512 * @opmnemonic udvexf30f2b
2513 * @opcode 0x2b
2514 * @oppfx 0xf3
2515 * @opunused vex.modrm
2516 * @opcpuid avx
2517 * @optest ->
2518 * @opdone
2519 */
2520
2521/**
2522 * @opmnemonic udvexf20f2b
2523 * @opcode 0x2b
2524 * @oppfx 0xf2
2525 * @opunused vex.modrm
2526 * @opcpuid avx
2527 * @optest ->
2528 * @opdone
2529 */
2530
2531
2532/* Opcode VEX.0F 0x2c - invalid */
2533/* Opcode VEX.66.0F 0x2c - invalid */
2534/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2535FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2536/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2537FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2538
2539/* Opcode VEX.0F 0x2d - invalid */
2540/* Opcode VEX.66.0F 0x2d - invalid */
2541/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2542FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2543/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2544FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2545
2546
2547/**
2548 * @opcode 0x2e
2549 * @oppfx none
2550 * @opflmodify cf,pf,af,zf,sf,of
2551 * @opflclear af,sf,of
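 * @note ZF,PF,CF = 1,1,1 for unordered, 0,0,0 for greater, 0,0,1 for less and 1,0,0 for equal.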
2552 */
2553FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2554{
2555 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2556 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2557 if (IEM_IS_MODRM_REG_MODE(bRm))
2558 {
2559 /*
2560 * Register, register.
2561 */
2562 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2563 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2564 IEM_MC_LOCAL(uint32_t, fEFlags);
2565 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2566 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2567 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2568 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2569 IEM_MC_PREPARE_AVX_USAGE();
2570 IEM_MC_FETCH_EFLAGS(fEFlags);
2571 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2572 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2573 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2574 pEFlags, uSrc1, uSrc2);
2575 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2576 IEM_MC_COMMIT_EFLAGS(fEFlags);
2577
2578 IEM_MC_ADVANCE_RIP_AND_FINISH();
2579 IEM_MC_END();
2580 }
2581 else
2582 {
2583 /*
2584 * Register, memory.
2585 */
2586 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2587 IEM_MC_LOCAL(uint32_t, fEFlags);
2588 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2589 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2590 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2592
2593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2594 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2595 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2596 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2597
2598 IEM_MC_PREPARE_AVX_USAGE();
2599 IEM_MC_FETCH_EFLAGS(fEFlags);
2600 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2601 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2602 pEFlags, uSrc1, uSrc2);
2603 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2604 IEM_MC_COMMIT_EFLAGS(fEFlags);
2605
2606 IEM_MC_ADVANCE_RIP_AND_FINISH();
2607 IEM_MC_END();
2608 }
2609}
2610
2611
2612/**
2613 * @opcode 0x2e
2614 * @oppfx 0x66
2615 * @opflmodify cf,pf,af,zf,sf,of
2616 * @opflclear af,sf,of
2617 */
2618FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2619{
2620 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2622 if (IEM_IS_MODRM_REG_MODE(bRm))
2623 {
2624 /*
2625 * Register, register.
2626 */
2627 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2628 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2629 IEM_MC_LOCAL(uint32_t, fEFlags);
2630 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2631 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2632 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2633 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2634 IEM_MC_PREPARE_AVX_USAGE();
2635 IEM_MC_FETCH_EFLAGS(fEFlags);
2636 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2637 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2638 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2639 pEFlags, uSrc1, uSrc2);
2640 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2641 IEM_MC_COMMIT_EFLAGS(fEFlags);
2642
2643 IEM_MC_ADVANCE_RIP_AND_FINISH();
2644 IEM_MC_END();
2645 }
2646 else
2647 {
2648 /*
2649 * Register, memory.
2650 */
2651 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2652 IEM_MC_LOCAL(uint32_t, fEFlags);
2653 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2654 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2655 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2657
2658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2659 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2660 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2661 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2662
2663 IEM_MC_PREPARE_AVX_USAGE();
2664 IEM_MC_FETCH_EFLAGS(fEFlags);
2665 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2666 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2667 pEFlags, uSrc1, uSrc2);
2668 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2669 IEM_MC_COMMIT_EFLAGS(fEFlags);
2670
2671 IEM_MC_ADVANCE_RIP_AND_FINISH();
2672 IEM_MC_END();
2673 }
2674}
2675
2676
2677/* Opcode VEX.F3.0F 0x2e - invalid */
2678/* Opcode VEX.F2.0F 0x2e - invalid */
2679
2680/**
2681 * @opcode 0x2f
2682 * @oppfx none
2683 * @opflmodify cf,pf,af,zf,sf,of
2684 * @opflclear af,sf,of
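 * @note Unlike vucomiss, vcomiss raises the invalid-operation exception (#IA) on QNaN operands as well, not just on SNaN.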
2685 */
2686FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2687{
2688 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (IEM_IS_MODRM_REG_MODE(bRm))
2691 {
2692 /*
2693 * Register, register.
2694 */
2695 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2696 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2697 IEM_MC_LOCAL(uint32_t, fEFlags);
2698 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2699 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2700 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2701 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2702 IEM_MC_PREPARE_AVX_USAGE();
2703 IEM_MC_FETCH_EFLAGS(fEFlags);
2704 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2705 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2706 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2707 pEFlags, uSrc1, uSrc2);
2708 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2709 IEM_MC_COMMIT_EFLAGS(fEFlags);
2710
2711 IEM_MC_ADVANCE_RIP_AND_FINISH();
2712 IEM_MC_END();
2713 }
2714 else
2715 {
2716 /*
2717 * Register, memory.
2718 */
2719 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2720 IEM_MC_LOCAL(uint32_t, fEFlags);
2721 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2722 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2723 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2725
2726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2727 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2728 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2729 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2730
2731 IEM_MC_PREPARE_AVX_USAGE();
2732 IEM_MC_FETCH_EFLAGS(fEFlags);
2733 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2734 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2735 pEFlags, uSrc1, uSrc2);
2736 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2737 IEM_MC_COMMIT_EFLAGS(fEFlags);
2738
2739 IEM_MC_ADVANCE_RIP_AND_FINISH();
2740 IEM_MC_END();
2741 }
2742}
2743
2744
2745/**
2746 * @opcode 0x2f
2747 * @oppfx 0x66
2748 * @opflmodify cf,pf,af,zf,sf,of
2749 * @opflclear af,sf,of
2750 */
2751FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2752{
2753 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2755 if (IEM_IS_MODRM_REG_MODE(bRm))
2756 {
2757 /*
2758 * Register, register.
2759 */
2760 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2761 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2762 IEM_MC_LOCAL(uint32_t, fEFlags);
2763 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2764 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2765 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2766 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2767 IEM_MC_PREPARE_AVX_USAGE();
2768 IEM_MC_FETCH_EFLAGS(fEFlags);
2769 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2770 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2771 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2772 pEFlags, uSrc1, uSrc2);
2773 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2774 IEM_MC_COMMIT_EFLAGS(fEFlags);
2775
2776 IEM_MC_ADVANCE_RIP_AND_FINISH();
2777 IEM_MC_END();
2778 }
2779 else
2780 {
2781 /*
2782 * Register, memory.
2783 */
2784 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2785 IEM_MC_LOCAL(uint32_t, fEFlags);
2786 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2787 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2788 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2789 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2790
2791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2792 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2793 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2794 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2795
2796 IEM_MC_PREPARE_AVX_USAGE();
2797 IEM_MC_FETCH_EFLAGS(fEFlags);
2798 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2799 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2800 pEFlags, uSrc1, uSrc2);
2801 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2802 IEM_MC_COMMIT_EFLAGS(fEFlags);
2803
2804 IEM_MC_ADVANCE_RIP_AND_FINISH();
2805 IEM_MC_END();
2806 }
2807}
2808
2809
2810/* Opcode VEX.F3.0F 0x2f - invalid */
2811/* Opcode VEX.F2.0F 0x2f - invalid */
2812
2813/* Opcode VEX.0F 0x30 - invalid */
2814/* Opcode VEX.0F 0x31 - invalid */
2815/* Opcode VEX.0F 0x32 - invalid */
2816/* Opcode VEX.0F 0x33 - invalid */
2817/* Opcode VEX.0F 0x34 - invalid */
2818/* Opcode VEX.0F 0x35 - invalid */
2819/* Opcode VEX.0F 0x36 - invalid */
2820/* Opcode VEX.0F 0x37 - invalid */
2821/* Opcode VEX.0F 0x38 - invalid */
2822/* Opcode VEX.0F 0x39 - invalid */
2823/* Opcode VEX.0F 0x3a - invalid */
2824/* Opcode VEX.0F 0x3b - invalid */
2825/* Opcode VEX.0F 0x3c - invalid */
2826/* Opcode VEX.0F 0x3d - invalid */
2827/* Opcode VEX.0F 0x3e - invalid */
2828/* Opcode VEX.0F 0x3f - invalid */
2829/* Opcode VEX.0F 0x40 - invalid */
2830/* Opcode VEX.0F 0x41 - invalid */
2831/* Opcode VEX.0F 0x42 - invalid */
2832/* Opcode VEX.0F 0x43 - invalid */
2833/* Opcode VEX.0F 0x44 - invalid */
2834/* Opcode VEX.0F 0x45 - invalid */
2835/* Opcode VEX.0F 0x46 - invalid */
2836/* Opcode VEX.0F 0x47 - invalid */
2837/* Opcode VEX.0F 0x48 - invalid */
2838/* Opcode VEX.0F 0x49 - invalid */
2839/* Opcode VEX.0F 0x4a - invalid */
2840/* Opcode VEX.0F 0x4b - invalid */
2841/* Opcode VEX.0F 0x4c - invalid */
2842/* Opcode VEX.0F 0x4d - invalid */
2843/* Opcode VEX.0F 0x4e - invalid */
2844/* Opcode VEX.0F 0x4f - invalid */
2845
2846
2847/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2848FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2849{
2850 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
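 /* Collects the sign bit of each packed single into the low bits of the destination
    GPR (4 bits for VEX.128, 8 bits for VEX.256); the remaining bits are zeroed. */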
2851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2852 if (IEM_IS_MODRM_REG_MODE(bRm))
2853 {
2854 /*
2855 * Register, register.
2856 */
2857 if (pVCpu->iem.s.uVexLength == 0)
2858 {
2859 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2860 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2861 IEM_MC_LOCAL(uint8_t, u8Dst);
2862 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2863 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2864 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2865 IEM_MC_PREPARE_AVX_USAGE();
2866 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2867 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2868 pu8Dst, puSrc);
2869 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2870 IEM_MC_ADVANCE_RIP_AND_FINISH();
2871 IEM_MC_END();
2872 }
2873 else
2874 {
2875 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2876 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2877 IEM_MC_LOCAL(uint8_t, u8Dst);
2878 IEM_MC_LOCAL(RTUINT256U, uSrc);
2879 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2880 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2881
2882 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2883 IEM_MC_PREPARE_AVX_USAGE();
2884 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2885 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2886 pu8Dst, puSrc);
2887 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2888 IEM_MC_ADVANCE_RIP_AND_FINISH();
2889 IEM_MC_END();
2890 }
2891 }
2892 /* No memory operand. */
2893 else
2894 IEMOP_RAISE_INVALID_OPCODE_RET();
2895}
2896
2897
2898/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2899FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2900{
2901 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2902 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2903 if (IEM_IS_MODRM_REG_MODE(bRm))
2904 {
2905 /*
2906 * Register, register.
2907 */
2908 if (pVCpu->iem.s.uVexLength == 0)
2909 {
2910 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2911 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2912 IEM_MC_LOCAL(uint8_t, u8Dst);
2913 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2914 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2915 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2916 IEM_MC_PREPARE_AVX_USAGE();
2917 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2918 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2919 pu8Dst, puSrc);
2920 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2921 IEM_MC_ADVANCE_RIP_AND_FINISH();
2922 IEM_MC_END();
2923 }
2924 else
2925 {
2926 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2927 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2928 IEM_MC_LOCAL(uint8_t, u8Dst);
2929 IEM_MC_LOCAL(RTUINT256U, uSrc);
2930 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2931 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2932
2933 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2934 IEM_MC_PREPARE_AVX_USAGE();
2935 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2936 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2937 pu8Dst, puSrc);
2938 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2939 IEM_MC_ADVANCE_RIP_AND_FINISH();
2940 IEM_MC_END();
2941 }
2942 }
2943 /* No memory operand. */
2944 else
2945 IEMOP_RAISE_INVALID_OPCODE_RET();
2946}
2947
2948
2949/* Opcode VEX.F3.0F 0x50 - invalid */
2950/* Opcode VEX.F2.0F 0x50 - invalid */
2951
2952/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2953FNIEMOP_DEF(iemOp_vsqrtps_Vps_Wps)
2954{
2955 IEMOP_MNEMONIC2(VEX_RM, VSQRTPS, vsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2956 IEMOPMEDIAF2_INIT_VARS( vsqrtps);
2957 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2958}
2959
2960
2961/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2962FNIEMOP_DEF(iemOp_vsqrtpd_Vpd_Wpd)
2963{
2964 IEMOP_MNEMONIC2(VEX_RM, VSQRTPD, vsqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2965 IEMOPMEDIAF2_INIT_VARS( vsqrtpd);
2966 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2967}
2968
2969
2970/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2971FNIEMOP_DEF(iemOp_vsqrtss_Vss_Hss_Wss)
2972{
2973 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSS, vsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2974 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
2975 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback));
2976}
2977
2978
2979/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2980FNIEMOP_DEF(iemOp_vsqrtsd_Vsd_Hsd_Wsd)
2981{
2982 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSD, vsqrtsd, Vps, Hps, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2983 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
2984 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback));
2985}
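/* Note: the scalar forms above only operate on the low element of Wss/Wsd; the
   remaining bits of the destination come from Hss/Hsd and bits 255:128 are zeroed. */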
2986
2987
2988/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2989FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2990/* Opcode VEX.66.0F 0x52 - invalid */
2991/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2992FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2993/* Opcode VEX.F2.0F 0x52 - invalid */
2994
2995/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2996FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2997/* Opcode VEX.66.0F 0x53 - invalid */
2998/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2999FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
3000/* Opcode VEX.F2.0F 0x53 - invalid */
3001
3002
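/* The packed-FP logical instructions (vandps/vandpd, vandnps/vandnpd, vorps/vorpd,
   vxorps/vxorpd) are pure bitwise operations, so they simply reuse the integer
   vpand/vpandn/vpor/vpxor worker tables. */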
3003/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
3004FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
3005{
3006 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3007 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3008 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3009}
3010
3011
3012/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
3013FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
3014{
3015 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3016 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3017 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3018}
3019
3020
3021/* Opcode VEX.F3.0F 0x54 - invalid */
3022/* Opcode VEX.F2.0F 0x54 - invalid */
3023
3024
3025/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
3026FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
3027{
3028 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3029 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3030 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3031}
3032
3033
3034/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
3035FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
3036{
3037 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3038 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3039 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3040}
3041
3042
3043/* Opcode VEX.F3.0F 0x55 - invalid */
3044/* Opcode VEX.F2.0F 0x55 - invalid */
3045
3046/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
3047FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
3048{
3049 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3050 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3051 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3052}
3053
3054
3055/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
3056FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
3057{
3058 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3059 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3060 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3061}
3062
3063
3064/* Opcode VEX.F3.0F 0x56 - invalid */
3065/* Opcode VEX.F2.0F 0x56 - invalid */
3066
3067
3068/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
3069FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
3070{
3071 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3072 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3073 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3074}
3075
3076
3077/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
3078FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
3079{
3080 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3081 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3082 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3083}
3084
3085
3086/* Opcode VEX.F3.0F 0x57 - invalid */
3087/* Opcode VEX.F2.0F 0x57 - invalid */
3088
3089
3090/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
3091FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
3092{
3093 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3094 IEMOPMEDIAF3_INIT_VARS( vaddps);
3095 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3096}
3097
3098
3099/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
3100FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
3101{
3102 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3103 IEMOPMEDIAF3_INIT_VARS( vaddpd);
3104 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3105}
3106
3107
3108/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
3109FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
3110{
3111 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3112 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3113 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
3114}
3115
3116
3117/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
3118FNIEMOP_DEF(iemOp_vaddsd_Vsd_Hsd_Wsd)
3119{
3120 IEMOP_MNEMONIC3(VEX_RVM, VADDSD, vaddsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3121 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3122 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback));
3123}
3124
3125
3126/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
3127FNIEMOP_DEF(iemOp_vmulps_Vps_Hps_Wps)
3128{
3129 IEMOP_MNEMONIC3(VEX_RVM, VMULPS, vmulps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3130 IEMOPMEDIAF3_INIT_VARS( vmulps);
3131 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3132}
3133
3134
3135/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
3136FNIEMOP_DEF(iemOp_vmulpd_Vpd_Hpd_Wpd)
3137{
3138 IEMOP_MNEMONIC3(VEX_RVM, VMULPD, vmulpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3139 IEMOPMEDIAF3_INIT_VARS( vmulpd);
3140 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3141}
3142
3143
3144/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
3145FNIEMOP_DEF(iemOp_vmulss_Vss_Hss_Wss)
3146{
3147 IEMOP_MNEMONIC3(VEX_RVM, VMULSS, vmulss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3148 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3149 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback));
3150}
3151
3152
3153/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
3154FNIEMOP_DEF(iemOp_vmulsd_Vsd_Hsd_Wsd)
3155{
3156 IEMOP_MNEMONIC3(VEX_RVM, VMULSD, vmulsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3157 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3158 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback));
3159}
3160
3161
3162/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
3163FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
3164/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
3165FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
3166/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
3167FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
3168/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
3169FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
3170
3171/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
3172FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
3173/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
3174FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
3175/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
3176FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
3177/* Opcode VEX.F2.0F 0x5b - invalid */
3178
3179
3180/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
3181FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
3182{
3183 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3184 IEMOPMEDIAF3_INIT_VARS( vsubps);
3185 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3186}
3187
3188
3189/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
3190FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
3191{
3192 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3193 IEMOPMEDIAF3_INIT_VARS( vsubpd);
3194 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3195}
3196
3197
3198/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
3199FNIEMOP_DEF(iemOp_vsubss_Vss_Hss_Wss)
3200{
3201 IEMOP_MNEMONIC3(VEX_RVM, VSUBSS, vsubss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3202 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3203 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback));
3204}
3205
3206
3207/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
3208FNIEMOP_DEF(iemOp_vsubsd_Vsd_Hsd_Wsd)
3209{
3210 IEMOP_MNEMONIC3(VEX_RVM, VSUBSD, vsubsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3211 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3212 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback));
3213}
3214
3215
3216/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
3217FNIEMOP_DEF(iemOp_vminps_Vps_Hps_Wps)
3218{
3219 IEMOP_MNEMONIC3(VEX_RVM, VMINPS, vminps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3220 IEMOPMEDIAF3_INIT_VARS( vminps);
3221 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3222}
3223
3224
3225/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
3226FNIEMOP_DEF(iemOp_vminpd_Vpd_Hpd_Wpd)
3227{
3228 IEMOP_MNEMONIC3(VEX_RVM, VMINPD, vminpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3229 IEMOPMEDIAF3_INIT_VARS( vminpd);
3230 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3231}
3232
3233
3234/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
3235FNIEMOP_DEF(iemOp_vminss_Vss_Hss_Wss)
3236{
3237 IEMOP_MNEMONIC3(VEX_RVM, VMINSS, vminss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3238 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3239 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback));
3240}
3241
3242
3243/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
3244FNIEMOP_DEF(iemOp_vminsd_Vsd_Hsd_Wsd)
3245{
3246 IEMOP_MNEMONIC3(VEX_RVM, VMINSD, vminsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3247 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3248 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback));
3249}
3250
3251
3252/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
3253FNIEMOP_DEF(iemOp_vdivps_Vps_Hps_Wps)
3254{
3255 IEMOP_MNEMONIC3(VEX_RVM, VDIVPS, vdivps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3256 IEMOPMEDIAF3_INIT_VARS( vdivps);
3257 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3258}
3259
3260
3261/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
3262FNIEMOP_DEF(iemOp_vdivpd_Vpd_Hpd_Wpd)
3263{
3264 IEMOP_MNEMONIC3(VEX_RVM, VDIVPD, vdivpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3265 IEMOPMEDIAF3_INIT_VARS( vdivpd);
3266 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3267}
3268
3269
3270/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
3271FNIEMOP_DEF(iemOp_vdivss_Vss_Hss_Wss)
3272{
3273 IEMOP_MNEMONIC3(VEX_RVM, VDIVSS, vdivss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3274 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3275 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback));
3276}
3277
3278
3279/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
3280FNIEMOP_DEF(iemOp_vdivsd_Vsd_Hsd_Wsd)
3281{
3282 IEMOP_MNEMONIC3(VEX_RVM, VDIVSD, vdivsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3283 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3284 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback));
3285}
3286
3287
3288/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
3289FNIEMOP_DEF(iemOp_vmaxps_Vps_Hps_Wps)
3290{
3291 IEMOP_MNEMONIC3(VEX_RVM, VMAXPS, vmaxps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3292 IEMOPMEDIAF3_INIT_VARS( vmaxps);
3293 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3294}
3295
3296
3297/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
3298FNIEMOP_DEF(iemOp_vmaxpd_Vpd_Hpd_Wpd)
3299{
3300 IEMOP_MNEMONIC3(VEX_RVM, VMAXPD, vmaxpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3301 IEMOPMEDIAF3_INIT_VARS( vmaxpd);
3302 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3303}
3304
3305
3306/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
3307FNIEMOP_DEF(iemOp_vmaxss_Vss_Hss_Wss)
3308{
3309 IEMOP_MNEMONIC3(VEX_RVM, VMAXSS, vmaxss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3310 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3311 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback));
3312}
3313
3314
3315/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
3316FNIEMOP_DEF(iemOp_vmaxsd_Vsd_Hsd_Wsd)
3317{
3318 IEMOP_MNEMONIC3(VEX_RVM, VMAXSD, vmaxsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3319 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3320 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback));
3321}
3322
3323
3324/* Opcode VEX.0F 0x60 - invalid */
3325
3326
3327/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
3328FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
3329{
3330 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3331 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
3332 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3333}
3334
3335
3336/* Opcode VEX.F3.0F 0x60 - invalid */
3337
3338
3339/* Opcode VEX.0F 0x61 - invalid */
3340
3341
3342/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
3343FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
3344{
3345 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3346 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
3347 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3348}
3349
3350
3351/* Opcode VEX.F3.0F 0x61 - invalid */
3352
3353
3354/* Opcode VEX.0F 0x62 - invalid */
3355
3356/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
3357FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
3358{
3359 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3360 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
3361 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3362}
3363
3364
3365/* Opcode VEX.F3.0F 0x62 - invalid */
3366
3367
3368
3369/* Opcode VEX.0F 0x63 - invalid */
3370
3371
3372/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
3373FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
3374{
3375 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3376 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
3377 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3378}
3379
3380
3381/* Opcode VEX.F3.0F 0x63 - invalid */
3382
3383/* Opcode VEX.0F 0x64 - invalid */
3384
3385
3386/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
3387FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
3388{
3389 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3390 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
3391 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3392}
3393
3394
3395/* Opcode VEX.F3.0F 0x64 - invalid */
3396
3397/* Opcode VEX.0F 0x65 - invalid */
3398
3399
3400/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3401FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3402{
3403 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3404 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
3405 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3406}
3407
3408
3409/* Opcode VEX.F3.0F 0x65 - invalid */
3410
3411/* Opcode VEX.0F 0x66 - invalid */
3412
3413
3414/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3415FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3416{
3417 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3418 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3419 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3420}
3421
3422
3423/* Opcode VEX.F3.0F 0x66 - invalid */
3424
3425/* Opcode VEX.0F 0x67 - invalid */
3426
3427
3428/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3429FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3430{
3431 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3432 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3433 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3434}
3435
3436
3437/* Opcode VEX.F3.0F 0x67 - invalid */
3438
3439
3440///**
3441// * Common worker for SSE2 instructions on the form:
3442// * pxxxx xmm1, xmm2/mem128
3443// *
3444// * The 2nd operand is the second half of a register, which in the memory case
3445// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3446// * where it may read the full 128 bits or only the upper 64 bits.
3447// *
3448// * Exceptions type 4.
3449// */
3450//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3451//{
3452// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3453// if (IEM_IS_MODRM_REG_MODE(bRm))
3454// {
3455// /*
3456// * Register, register.
3457// */
3458// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3459// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3460// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3461// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3462// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3463// IEM_MC_PREPARE_SSE_USAGE();
3464// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3465// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3466// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3467// IEM_MC_ADVANCE_RIP_AND_FINISH();
3468// IEM_MC_END();
3469// }
3470// else
3471// {
3472// /*
3473// * Register, memory.
3474// */
3475// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3476// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3477// IEM_MC_LOCAL(RTUINT128U, uSrc);
3478// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3479// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3480//
3481// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3482// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3483// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3484// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3485//
3486// IEM_MC_PREPARE_SSE_USAGE();
3487// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3488// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3489//
3490// IEM_MC_ADVANCE_RIP_AND_FINISH();
3491// IEM_MC_END();
3492// }
3493// return VINF_SUCCESS;
3494//}
3495
3496
3497/* Opcode VEX.0F 0x68 - invalid */
3498
3499/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3500FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3501{
3502 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3503 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3504 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3505}
3506
3507
3508/* Opcode VEX.F3.0F 0x68 - invalid */
3509
3510
3511/* Opcode VEX.0F 0x69 - invalid */
3512
3513
3514/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3515FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3516{
3517 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3518 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3519 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3520}
3521
3522
3523/* Opcode VEX.F3.0F 0x69 - invalid */
3524
3525
3526/* Opcode VEX.0F 0x6a - invalid */
3527
3528
3529/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3530FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3531{
3532 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3533 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3534 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3535}
3536
3537
3538/* Opcode VEX.F3.0F 0x6a - invalid */
3539
3540
3541/* Opcode VEX.0F 0x6b - invalid */
3542
3543
3544/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3545FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3546{
3547 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3548 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3549 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3550}
3551
3552
3553/* Opcode VEX.F3.0F 0x6b - invalid */
3554
3555
3556/* Opcode VEX.0F 0x6c - invalid */
3557
3558
3559/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3560FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3561{
3562 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3563 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3564 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3565}
3566
3567
3568/* Opcode VEX.F3.0F 0x6c - invalid */
3569/* Opcode VEX.F2.0F 0x6c - invalid */
3570
3571
3572/* Opcode VEX.0F 0x6d - invalid */
3573
3574
3575/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3576FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3577{
3578 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3579 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3580 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3581}
3582
3583
3584/* Opcode VEX.F3.0F 0x6d - invalid */
3585
3586
3587/* Opcode VEX.0F 0x6e - invalid */
3588
3589FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3590{
3591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3592 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3593 {
3594 /**
3595 * @opcode 0x6e
3596 * @opcodesub rex.w=1
3597 * @oppfx 0x66
3598 * @opcpuid avx
3599 * @opgroup og_avx_simdint_datamov
3600 * @opxcpttype 5
3601 * @optest 64-bit / op1=1 op2=2 -> op1=2
3602 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3603 */
3604 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3605 if (IEM_IS_MODRM_REG_MODE(bRm))
3606 {
3607 /* XMM, greg64 */
3608 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3609 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3610 IEM_MC_LOCAL(uint64_t, u64Tmp);
3611
3612 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3613 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3614
3615 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3616 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3617
3618 IEM_MC_ADVANCE_RIP_AND_FINISH();
3619 IEM_MC_END();
3620 }
3621 else
3622 {
3623 /* XMM, [mem64] */
3624 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3626 IEM_MC_LOCAL(uint64_t, u64Tmp);
3627
3628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3629 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3630 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3631 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3632
3633 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3634 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3635
3636 IEM_MC_ADVANCE_RIP_AND_FINISH();
3637 IEM_MC_END();
3638 }
3639 }
3640 else
3641 {
3642 /**
3643 * @opdone
3644 * @opcode 0x6e
3645 * @opcodesub rex.w=0
3646 * @oppfx 0x66
3647 * @opcpuid avx
3648 * @opgroup og_avx_simdint_datamov
3649 * @opxcpttype 5
3650 * @opfunction iemOp_vmovd_q_Vy_Ey
3651 * @optest op1=1 op2=2 -> op1=2
3652 * @optest op1=0 op2=-42 -> op1=-42
3653 */
3654 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3655 if (IEM_IS_MODRM_REG_MODE(bRm))
3656 {
3657 /* XMM, greg32 */
3658 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3659 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3660 IEM_MC_LOCAL(uint32_t, u32Tmp);
3661
3662 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3663 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3664
3665 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3666 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3667
3668 IEM_MC_ADVANCE_RIP_AND_FINISH();
3669 IEM_MC_END();
3670 }
3671 else
3672 {
3673 /* XMM, [mem32] */
3674 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3676 IEM_MC_LOCAL(uint32_t, u32Tmp);
3677
3678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3679 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3680 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3681 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3682
3683 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3684 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3685
3686 IEM_MC_ADVANCE_RIP_AND_FINISH();
3687 IEM_MC_END();
3688 }
3689 }
3690}
3691
3692
3693/* Opcode VEX.F3.0F 0x6e - invalid */
3694
3695
3696/* Opcode VEX.0F 0x6f - invalid */
3697
3698/**
3699 * @opcode 0x6f
3700 * @oppfx 0x66
3701 * @opcpuid avx
3702 * @opgroup og_avx_simdint_datamove
3703 * @opxcpttype 1
3704 * @optest op1=1 op2=2 -> op1=2
3705 * @optest op1=0 op2=-42 -> op1=-42
3706 */
3707FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3708{
3709 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3710 Assert(pVCpu->iem.s.uVexLength <= 1);
3711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3712 if (IEM_IS_MODRM_REG_MODE(bRm))
3713 {
3714 /*
3715 * Register, register.
3716 */
3717 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3718 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3719
3720 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3721 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3722 if (pVCpu->iem.s.uVexLength == 0)
3723 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3724 IEM_GET_MODRM_RM(pVCpu, bRm));
3725 else
3726 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3727 IEM_GET_MODRM_RM(pVCpu, bRm));
3728 IEM_MC_ADVANCE_RIP_AND_FINISH();
3729 IEM_MC_END();
3730 }
3731 else if (pVCpu->iem.s.uVexLength == 0)
3732 {
3733 /*
3734 * Register, memory128.
3735 */
3736 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3737 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3739
3740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3741 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3742 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3743 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3744
3745 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3746 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3747
3748 IEM_MC_ADVANCE_RIP_AND_FINISH();
3749 IEM_MC_END();
3750 }
3751 else
3752 {
3753 /*
3754 * Register, memory256.
3755 */
3756 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3757 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3759
3760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3761 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3762 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3763 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3764
3765 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3766 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3767
3768 IEM_MC_ADVANCE_RIP_AND_FINISH();
3769 IEM_MC_END();
3770 }
3771}
3772
3773/**
3774 * @opcode 0x6f
3775 * @oppfx 0xf3
3776 * @opcpuid avx
3777 * @opgroup og_avx_simdint_datamove
3778 * @opxcpttype 4UA
3779 * @optest op1=1 op2=2 -> op1=2
3780 * @optest op1=0 op2=-42 -> op1=-42
3781 */
3782FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3783{
3784 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3785 Assert(pVCpu->iem.s.uVexLength <= 1);
3786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3787 if (IEM_IS_MODRM_REG_MODE(bRm))
3788 {
3789 /*
3790 * Register, register.
3791 */
3792 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3793 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3794
3795 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3796 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3797 if (pVCpu->iem.s.uVexLength == 0)
3798 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3799 IEM_GET_MODRM_RM(pVCpu, bRm));
3800 else
3801 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3802 IEM_GET_MODRM_RM(pVCpu, bRm));
3803 IEM_MC_ADVANCE_RIP_AND_FINISH();
3804 IEM_MC_END();
3805 }
3806 else if (pVCpu->iem.s.uVexLength == 0)
3807 {
3808 /*
3809 * Register, memory128.
3810 */
3811 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3812 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3814
3815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3816 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3817 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3818 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3819
3820 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3821 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3822
3823 IEM_MC_ADVANCE_RIP_AND_FINISH();
3824 IEM_MC_END();
3825 }
3826 else
3827 {
3828 /*
3829 * Register, memory256.
3830 */
3831 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3832 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3834
3835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3836 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3837 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3838 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3839
3840 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3841 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3842
3843 IEM_MC_ADVANCE_RIP_AND_FINISH();
3844 IEM_MC_END();
3845 }
3846}
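
/*
 * Side note (a sketch, not a description of the IEM internals): the only
 * difference between the vmovdqa and vmovdqu workers above is the memory
 * fetch - the ..._ALIGN_SSE / ..._ALIGN_AVX variants enforce natural operand
 * alignment while the ..._NO_AC variants accept any address.  The alignment
 * rule itself is just:
 *
 *      // hypothetical helper, purely illustrative
 *      static int iemDemoIsMisaligned(uint64_t GCPtrEff, unsigned cbOperand)
 *      {
 *          return (GCPtrEff & (cbOperand - 1)) != 0;   // 16 for xmm, 32 for ymm accesses
 *      }
 */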
3847
3848
3849/* Opcode VEX.0F 0x70 - invalid */
3850
3851
3852/**
3853 * Common worker for AVX/AVX2 instructions on the forms:
3854 * - vpxxx xmm0, xmm2/mem128, imm8
3855 * - vpxxx ymm0, ymm2/mem256, imm8
3856 *
3857 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3858 */
3859FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3860{
3861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3862 if (IEM_IS_MODRM_REG_MODE(bRm))
3863 {
3864 /*
3865 * Register, register.
3866 */
3867 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3868 if (pVCpu->iem.s.uVexLength)
3869 {
3870 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3871 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3872 IEM_MC_LOCAL(RTUINT256U, uDst);
3873 IEM_MC_LOCAL(RTUINT256U, uSrc);
3874 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3875 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3876 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3877 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3878 IEM_MC_PREPARE_AVX_USAGE();
3879 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3880 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3881 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3882 IEM_MC_ADVANCE_RIP_AND_FINISH();
3883 IEM_MC_END();
3884 }
3885 else
3886 {
3887 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3888 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3889 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3890 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3891 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3892 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3893 IEM_MC_PREPARE_AVX_USAGE();
3894 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3895 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3896 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3897 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3898 IEM_MC_ADVANCE_RIP_AND_FINISH();
3899 IEM_MC_END();
3900 }
3901 }
3902 else
3903 {
3904 /*
3905 * Register, memory.
3906 */
3907 if (pVCpu->iem.s.uVexLength)
3908 {
3909 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3910 IEM_MC_LOCAL(RTUINT256U, uDst);
3911 IEM_MC_LOCAL(RTUINT256U, uSrc);
3912 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3913 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3914 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3915
3916 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3917 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3918 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3919 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3920 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3921 IEM_MC_PREPARE_AVX_USAGE();
3922
3923 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3924 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3925 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3926
3927 IEM_MC_ADVANCE_RIP_AND_FINISH();
3928 IEM_MC_END();
3929 }
3930 else
3931 {
3932 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3933 IEM_MC_LOCAL(RTUINT128U, uSrc);
3934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3935 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3936 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3937
3938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3939 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3940 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3941 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3942 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3943 IEM_MC_PREPARE_AVX_USAGE();
3944
3945 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3946 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3947 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3948 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3949
3950 IEM_MC_ADVANCE_RIP_AND_FINISH();
3951 IEM_MC_END();
3952 }
3953 }
3954}
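
/*
 * Illustrative sketch (not part of the emulation; it only restates the
 * architectural definition): the imm8 handed to the worker above is four
 * 2-bit selectors, one per destination dword, each picking a source dword.
 * A plain C model of the 128-bit vpshufd case:
 *
 *      #include <stdint.h>
 *      static void iemDemoPshufdU128(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
 *      {
 *          for (unsigned i = 0; i < 4; i++)                    // one 2-bit field per result dword
 *              au32Dst[i] = au32Src[(bImm >> (i * 2)) & 3];    // the field selects the source dword
 *      }
 *
 * vpshufhw and vpshuflw use the same selector layout but only permute the
 * high or low four words of each 128-bit lane, copying the other half as-is.
 */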
3955
3956
3957/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3958FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3959{
3960 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3961 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3962 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3963
3964}
3965
3966
3967/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3968FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3969{
3970 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3971 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3972 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3973
3974}
3975
3976
3977/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3978FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3979{
3980 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3981 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3982 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3983}
3984
3985
3986/**
3987 * Common worker(s) for AVX/AVX2 instructions on the forms:
3988 * - vpxxx xmm0, xmm2, imm8
3989 * - vpxxx ymm0, ymm2, imm8
3990 *
3991 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3992 */
3993FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
3994{
3995 if (IEM_IS_MODRM_REG_MODE(bRm))
3996 {
3997 /*
3998 * Register, register.
3999 */
4000 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4001 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4002 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
4003 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4004 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4005 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4006 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4007 IEM_MC_PREPARE_AVX_USAGE();
4008 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4009 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4010 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4011 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
4012 IEM_MC_ADVANCE_RIP_AND_FINISH();
4013 IEM_MC_END();
4014 }
4015 /* No memory operand. */
4016 else
4017 IEMOP_RAISE_INVALID_OPCODE_RET();
4018}
4019
4020FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4021{
4022 if (IEM_IS_MODRM_REG_MODE(bRm))
4023 {
4024 /*
4025 * Register, register.
4026 */
4027 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4028 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4029 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
4030 IEM_MC_LOCAL(RTUINT256U, uDst);
4031 IEM_MC_LOCAL(RTUINT256U, uSrc);
4032 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4033 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4034 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4035 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4036 IEM_MC_PREPARE_AVX_USAGE();
4037 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4038 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4039 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
4040 IEM_MC_ADVANCE_RIP_AND_FINISH();
4041 IEM_MC_END();
4042 }
4043 /* No memory operand. */
4044 else
4045 IEMOP_RAISE_INVALID_OPCODE_RET();
4046}
4047
4048
4049/* Opcode VEX.0F 0x71 11/2 - invalid. */
4050/** Opcode VEX.66.0F 0x71 11/2. */
4051FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
4052{
4053 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4054 if (pVCpu->iem.s.uVexLength)
4055 {
4056 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4057 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
4058 }
4059 else
4060 {
4061 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4062 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
4063 }
4064}
4065
4066
4067/* Opcode VEX.0F 0x71 11/4 - invalid */
4068/** Opcode VEX.66.0F 0x71 11/4. */
4069FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
4070{
4071 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4072 if (pVCpu->iem.s.uVexLength)
4073 {
4074 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4075 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
4076 }
4077 else
4078 {
4079 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4080 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
4081 }
4082}
4083
4084/* Opcode VEX.0F 0x71 11/6 - invalid */
4085
4086/** Opcode VEX.66.0F 0x71 11/6. */
4087FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
4088{
4089 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4090 if (pVCpu->iem.s.uVexLength)
4091 {
4092 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4093 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
4094 }
4095 else
4096 {
4097 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4098 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
4099 }
4100}
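
/*
 * Illustrative sketch of the word shift semantics decoded by the three group
 * 12 handlers above (plain C, not the SIMD helpers themselves; assumes an
 * arithmetic right shift of signed values, as on the usual compilers):
 *
 *      #include <stdint.h>
 *      static uint16_t iemDemoPsrlwU16(uint16_t uWord, uint8_t cShift)
 *      {
 *          return cShift > 15 ? 0 : (uint16_t)(uWord >> cShift);               // logical: zero when out of range
 *      }
 *      static uint16_t iemDemoPsrawU16(uint16_t uWord, uint8_t cShift)
 *      {
 *          return (uint16_t)((int16_t)uWord >> (cShift > 15 ? 15 : cShift));   // arithmetic: sign fill, count capped
 *      }
 *
 * vpsllw mirrors the logical case in the other direction; each word is
 * shifted independently and VEX.L only decides whether 8 or 16 words are done.
 */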
4101
4102
4103/**
4104 * VEX Group 12 jump table for register variant.
4105 */
4106IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
4107{
4108 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4109 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4110 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4111 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4112 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4113 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4114 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4115 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4116};
4117AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
4118
4119
4120/** Opcode VEX.0F 0x71. */
4121FNIEMOP_DEF(iemOp_VGrp12)
4122{
4123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4124 if (IEM_IS_MODRM_REG_MODE(bRm))
4125 /* register, register */
4126 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4127 + pVCpu->iem.s.idxPrefix], bRm);
4128 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4129}
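
/*
 * Illustrative note (a sketch, not authoritative): the group dispatchers in
 * this area index their 8*4 jump tables as ModR/M.reg * 4 + idxPrefix, where
 * the prefix columns are ordered none, 066h, 0f3h, 0f2h (see the column
 * legend on the VEX group 15 table further down).  For example, VEX.66.0F
 * 0x71 with ModR/M = 0E2h lands on the vpsraw entry:
 *
 *      // hypothetical standalone model of the lookup
 *      unsigned const iReg      = (0xE2 >> 3) & 7;         // reg field = 4
 *      unsigned const idxPrefix = 1;                       // 066h column
 *      unsigned const idxEntry  = iReg * 4 + idxPrefix;    // 17 -> iemOp_VGrp12_vpsraw_Hx_Ux_Ib
 */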
4130
4131
4132/* Opcode VEX.0F 0x72 11/2 - invalid. */
4133/** Opcode VEX.66.0F 0x72 11/2. */
4134FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
4135{
4136 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4137 if (pVCpu->iem.s.uVexLength)
4138 {
4139 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4140 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
4141 }
4142 else
4143 {
4144 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4145 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
4146 }
4147}
4148
4149
4150/* Opcode VEX.0F 0x72 11/4 - invalid. */
4151/** Opcode VEX.66.0F 0x72 11/4. */
4152FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
4153{
4154 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4155 if (pVCpu->iem.s.uVexLength)
4156 {
4157 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4158 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
4159 }
4160 else
4161 {
4162 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4163 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
4164 }
4165}
4166
4167/* Opcode VEX.0F 0x72 11/6 - invalid. */
4168
4169/** Opcode VEX.66.0F 0x72 11/6. */
4170FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
4171{
4172 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4173 if (pVCpu->iem.s.uVexLength)
4174 {
4175 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4176 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
4177 }
4178 else
4179 {
4180 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4181 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
4182 }
4183}
4184
4185
4186/**
4187 * VEX Group 13 jump table for register variant.
4188 */
4189IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
4190{
4191 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4192 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4193 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4194 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4195 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4196 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4197 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4198 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4199};
4200AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
4201
4202/** Opcode VEX.0F 0x72. */
4203FNIEMOP_DEF(iemOp_VGrp13)
4204{
4205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4206 if (IEM_IS_MODRM_REG_MODE(bRm))
4207 /* register, register */
4208 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4209 + pVCpu->iem.s.idxPrefix], bRm);
4210 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4211}
4212
4213
4214/* Opcode VEX.0F 0x73 11/2 - invalid. */
4215/** Opcode VEX.66.0F 0x73 11/2. */
4216FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
4217{
4218 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4219 if (pVCpu->iem.s.uVexLength)
4220 {
4221 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4222 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
4223 }
4224 else
4225 {
4226 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4227 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
4228 }
4229}
4230
4231
4232/** Opcode VEX.66.0F 0x73 11/3. */
4233FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
4234{
4235 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4236 if (pVCpu->iem.s.uVexLength)
4237 {
4238 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4239 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
4240 }
4241 else
4242 {
4243 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4244 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
4245 }
4246}
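
/*
 * Illustrative sketch (restating the architectural behaviour, not the IEM
 * helper): vpsrldq shifts each 128-bit lane right by imm8 *bytes*, shifting
 * in zeros; a count above 15 clears the lane.  In plain C:
 *
 *      #include <stdint.h>
 *      #include <string.h>
 *      static void iemDemoPsrldqU128(uint8_t abLane[16], uint8_t cbShift)
 *      {
 *          uint8_t abTmp[16] = {0};
 *          if (cbShift < 16)
 *              memcpy(abTmp, &abLane[cbShift], 16 - cbShift);  // byte granular, zero fill from the top
 *          memcpy(abLane, abTmp, sizeof(abTmp));
 *      }
 *
 * vpslldq is the mirror image (zero fill from the bottom), and the 256-bit
 * forms apply the operation to each 128-bit lane independently.
 */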
4247
4248/* Opcode VEX.0F 0x73 11/6 - invalid. */
4249
4250/** Opcode VEX.66.0F 0x73 11/6. */
4251FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
4252{
4253 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4254 if (pVCpu->iem.s.uVexLength)
4255 {
4256 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4257 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
4258 }
4259 else
4260 {
4261 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4262 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
4263 }
4264}
4265
4266/** Opcode VEX.66.0F 0x73 11/7. */
4267FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
4268{
4269 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4270 if (pVCpu->iem.s.uVexLength)
4271 {
4272 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4273 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
4274 }
4275 else
4276 {
4277 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4278 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
4279 }
4280}
4281
4282/* Opcode VEX.0F 0x73 11/7 - invalid. */
4283
4284/**
4285 * VEX Group 14 jump table for register variant.
4286 */
4287IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
4288{
4289 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4290 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4291 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4292 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4293 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4294 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4295 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4296 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4297};
4298AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
4299
4300
4301/** Opcode VEX.0F 0x73. */
4302FNIEMOP_DEF(iemOp_VGrp14)
4303{
4304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4305 if (IEM_IS_MODRM_REG_MODE(bRm))
4306 /* register, register */
4307 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4308 + pVCpu->iem.s.idxPrefix], bRm);
4309 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4310}
4311
4312
4313/* Opcode VEX.0F 0x74 - invalid */
4314
4315
4316/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
4317FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
4318{
4319 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4320 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
4321 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4322}
4323
4324/* Opcode VEX.F3.0F 0x74 - invalid */
4325/* Opcode VEX.F2.0F 0x74 - invalid */
4326
4327
4328/* Opcode VEX.0F 0x75 - invalid */
4329
4330
4331/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
4332FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
4333{
4334 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4335 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
4336 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4337}
4338
4339
4340/* Opcode VEX.F3.0F 0x75 - invalid */
4341/* Opcode VEX.F2.0F 0x75 - invalid */
4342
4343
4344/* Opcode VEX.0F 0x76 - invalid */
4345
4346
4347/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
4348FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
4349{
4350 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4351 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
4352 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4353}
4354
4355
4356/* Opcode VEX.F3.0F 0x76 - invalid */
4357/* Opcode VEX.F2.0F 0x76 - invalid */
4358
4359
4360/** Opcode VEX.0F 0x77 - vzeroupper (VEX.L=0) / vzeroall (VEX.L=1) */
4361FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
4362{
4363 Assert(pVCpu->iem.s.uVexLength <= 1);
4364 if (pVCpu->iem.s.uVexLength == 0)
4365 {
4366 /*
4367 * 128-bit: vzeroupper
4368 */
4369 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
4370 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4371
4372 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4373 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4374 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4375
4376 IEM_MC_CLEAR_YREG_128_UP(0);
4377 IEM_MC_CLEAR_YREG_128_UP(1);
4378 IEM_MC_CLEAR_YREG_128_UP(2);
4379 IEM_MC_CLEAR_YREG_128_UP(3);
4380 IEM_MC_CLEAR_YREG_128_UP(4);
4381 IEM_MC_CLEAR_YREG_128_UP(5);
4382 IEM_MC_CLEAR_YREG_128_UP(6);
4383 IEM_MC_CLEAR_YREG_128_UP(7);
4384
4385 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4386 {
4387 IEM_MC_CLEAR_YREG_128_UP( 8);
4388 IEM_MC_CLEAR_YREG_128_UP( 9);
4389 IEM_MC_CLEAR_YREG_128_UP(10);
4390 IEM_MC_CLEAR_YREG_128_UP(11);
4391 IEM_MC_CLEAR_YREG_128_UP(12);
4392 IEM_MC_CLEAR_YREG_128_UP(13);
4393 IEM_MC_CLEAR_YREG_128_UP(14);
4394 IEM_MC_CLEAR_YREG_128_UP(15);
4395 }
4396
4397 IEM_MC_ADVANCE_RIP_AND_FINISH();
4398 IEM_MC_END();
4399 }
4400 else
4401 {
4402 /*
4403 * 256-bit: vzeroall
4404 */
4405 IEMOP_MNEMONIC(vzeroall, "vzeroall");
4406 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4407
4408 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4409 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4410 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4411
4412 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
4413 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
4414 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
4415 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
4416 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
4417 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4418 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4419 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4420 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4421
4422 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4423 {
4424 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4425 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
4426 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
4427 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
4428 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
4429 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
4430 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
4431 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
4432 }
4433
4434 IEM_MC_ADVANCE_RIP_AND_FINISH();
4435 IEM_MC_END();
4436 }
4437}
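
/*
 * Illustrative sketch of the architectural effect emulated above, using a
 * hypothetical aYmm[][32] byte view of the register file purely for
 * demonstration: vzeroupper clears bits 255:128 of every YMM register the
 * current mode exposes, vzeroall clears those registers completely.
 *
 *      #include <stdint.h>
 *      #include <string.h>
 *      static void iemDemoVZero(uint8_t aYmm[][32], unsigned cRegs, int fZeroAll)
 *      {
 *          for (unsigned i = 0; i < cRegs; i++)                            // cRegs: 8, or 16 in 64-bit mode
 *              memset(&aYmm[i][fZeroAll ? 0 : 16], 0, fZeroAll ? 32 : 16); // whole register vs. upper half
 *      }
 */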
4438
4439
4440/* Opcode VEX.66.0F 0x77 - invalid */
4441/* Opcode VEX.F3.0F 0x77 - invalid */
4442/* Opcode VEX.F2.0F 0x77 - invalid */
4443
4444/* Opcode VEX.0F 0x78 - invalid */
4445/* Opcode VEX.66.0F 0x78 - invalid */
4446/* Opcode VEX.F3.0F 0x78 - invalid */
4447/* Opcode VEX.F2.0F 0x78 - invalid */
4448
4449/* Opcode VEX.0F 0x79 - invalid */
4450/* Opcode VEX.66.0F 0x79 - invalid */
4451/* Opcode VEX.F3.0F 0x79 - invalid */
4452/* Opcode VEX.F2.0F 0x79 - invalid */
4453
4454/* Opcode VEX.0F 0x7a - invalid */
4455/* Opcode VEX.66.0F 0x7a - invalid */
4456/* Opcode VEX.F3.0F 0x7a - invalid */
4457/* Opcode VEX.F2.0F 0x7a - invalid */
4458
4459/* Opcode VEX.0F 0x7b - invalid */
4460/* Opcode VEX.66.0F 0x7b - invalid */
4461/* Opcode VEX.F3.0F 0x7b - invalid */
4462/* Opcode VEX.F2.0F 0x7b - invalid */
4463
4464/* Opcode VEX.0F 0x7c - invalid */
4465
4466
4467/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4468FNIEMOP_DEF(iemOp_vhaddpd_Vpd_Hpd_Wpd)
4469{
4470 IEMOP_MNEMONIC3(VEX_RVM, VHADDPD, vhaddpd, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4471 IEMOPMEDIAF3_INIT_VARS( vhaddpd);
4472 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4473}
4474
4475
4476/* Opcode VEX.F3.0F 0x7c - invalid */
4477
4478
4479/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4480FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
4481{
4482 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4483 IEMOPMEDIAF3_INIT_VARS( vhaddps);
4484 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4485}
4486
4487
4488/* Opcode VEX.0F 0x7d - invalid */
4489
4490
4491/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4492FNIEMOP_DEF(iemOp_vhsubpd_Vpd_Hpd_Wpd)
4493{
4494 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPD, vhsubpd, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4495 IEMOPMEDIAF3_INIT_VARS( vhsubpd);
4496 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4497}
4498
4499
4500/* Opcode VEX.F3.0F 0x7d - invalid */
4501
4502
4503/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4504FNIEMOP_DEF(iemOp_vhsubps_Vps_Hps_Wps)
4505{
4506 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPS, vhsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4507 IEMOPMEDIAF3_INIT_VARS( vhsubps);
4508 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4509}
4510
4511
4512/* Opcode VEX.0F 0x7e - invalid */
4513
4514FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4515{
4516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4517 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4518 {
4519 /**
4520 * @opcode 0x7e
4521 * @opcodesub rex.w=1
4522 * @oppfx 0x66
4523 * @opcpuid avx
4524 * @opgroup og_avx_simdint_datamov
4525 * @opxcpttype 5
4526 * @optest 64-bit / op1=1 op2=2 -> op1=2
4527 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4528 */
4529 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4530 if (IEM_IS_MODRM_REG_MODE(bRm))
4531 {
4532 /* greg64, XMM */
4533 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4534 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4535 IEM_MC_LOCAL(uint64_t, u64Tmp);
4536
4537 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4538 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4539
4540 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4541 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4542
4543 IEM_MC_ADVANCE_RIP_AND_FINISH();
4544 IEM_MC_END();
4545 }
4546 else
4547 {
4548 /* [mem64], XMM */
4549 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4551 IEM_MC_LOCAL(uint64_t, u64Tmp);
4552
4553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4554 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4555 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4556 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4557
4558 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4559 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4560
4561 IEM_MC_ADVANCE_RIP_AND_FINISH();
4562 IEM_MC_END();
4563 }
4564 }
4565 else
4566 {
4567 /**
4568 * @opdone
4569 * @opcode 0x7e
4570 * @opcodesub rex.w=0
4571 * @oppfx 0x66
4572 * @opcpuid avx
4573 * @opgroup og_avx_simdint_datamov
4574 * @opxcpttype 5
4575 * @opfunction iemOp_vmovd_q_Ey_Vy
4576 * @optest op1=1 op2=2 -> op1=2
4577 * @optest op1=0 op2=-42 -> op1=-42
4578 */
4579 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4580 if (IEM_IS_MODRM_REG_MODE(bRm))
4581 {
4582 /* greg32, XMM */
4583 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4584 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4585 IEM_MC_LOCAL(uint32_t, u32Tmp);
4586
4587 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4588 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4589
4590 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4591 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4592
4593 IEM_MC_ADVANCE_RIP_AND_FINISH();
4594 IEM_MC_END();
4595 }
4596 else
4597 {
4598 /* [mem32], XMM */
4599 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4601 IEM_MC_LOCAL(uint32_t, u32Tmp);
4602
4603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4604 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4605 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4606 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4607
4608 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4609 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4610
4611 IEM_MC_ADVANCE_RIP_AND_FINISH();
4612 IEM_MC_END();
4613 }
4614 }
4615}
4616
4617
4618/**
4619 * @opcode 0x7e
4620 * @oppfx 0xf3
4621 * @opcpuid avx
4622 * @opgroup og_avx_pcksclr_datamove
4623 * @opxcpttype none
4624 * @optest op1=1 op2=2 -> op1=2
4625 * @optest op1=0 op2=-42 -> op1=-42
4626 */
4627FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4628{
4629 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4631 if (IEM_IS_MODRM_REG_MODE(bRm))
4632 {
4633 /*
4634 * Register, register.
4635 */
4636 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4637 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4638
4639 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4640 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4641
4642 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4643 IEM_GET_MODRM_RM(pVCpu, bRm));
4644 IEM_MC_ADVANCE_RIP_AND_FINISH();
4645 IEM_MC_END();
4646 }
4647 else
4648 {
4649 /*
4650 * Memory, register.
4651 */
4652 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4653 IEM_MC_LOCAL(uint64_t, uSrc);
4654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4655
4656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4657 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4658 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4659 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4660
4661 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4662 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4663
4664 IEM_MC_ADVANCE_RIP_AND_FINISH();
4665 IEM_MC_END();
4666 }
4667}
4668
4669/* Opcode VEX.F2.0F 0x7e - invalid */
4670
4671
4672/* Opcode VEX.0F 0x7f - invalid */
4673
4674/**
4675 * @opcode 0x7f
4676 * @oppfx 0x66
4677 * @opcpuid avx
4678 * @opgroup og_avx_simdint_datamove
4679 * @opxcpttype 1
4680 * @optest op1=1 op2=2 -> op1=2
4681 * @optest op1=0 op2=-42 -> op1=-42
4682 */
4683FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4684{
4685 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4686 Assert(pVCpu->iem.s.uVexLength <= 1);
4687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4688 if (IEM_IS_MODRM_REG_MODE(bRm))
4689 {
4690 /*
4691 * Register, register.
4692 */
4693 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4694 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4695
4696 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4697 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4698 if (pVCpu->iem.s.uVexLength == 0)
4699 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4700 IEM_GET_MODRM_REG(pVCpu, bRm));
4701 else
4702 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4703 IEM_GET_MODRM_REG(pVCpu, bRm));
4704 IEM_MC_ADVANCE_RIP_AND_FINISH();
4705 IEM_MC_END();
4706 }
4707 else if (pVCpu->iem.s.uVexLength == 0)
4708 {
4709 /*
4710 * Register, memory128.
4711 */
4712 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4713 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4715
4716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4717 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4718 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4719 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4720
4721 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4722 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4723
4724 IEM_MC_ADVANCE_RIP_AND_FINISH();
4725 IEM_MC_END();
4726 }
4727 else
4728 {
4729 /*
4730 * Register, memory256.
4731 */
4732 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4733 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4734 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4735
4736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4737 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4738 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4739 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4740
4741 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4742 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4743
4744 IEM_MC_ADVANCE_RIP_AND_FINISH();
4745 IEM_MC_END();
4746 }
4747}
4748
4749
4750/**
4751 * @opcode 0x7f
4752 * @oppfx 0xf3
4753 * @opcpuid avx
4754 * @opgroup og_avx_simdint_datamove
4755 * @opxcpttype 4UA
4756 * @optest op1=1 op2=2 -> op1=2
4757 * @optest op1=0 op2=-42 -> op1=-42
4758 */
4759FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4760{
4761 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4762 Assert(pVCpu->iem.s.uVexLength <= 1);
4763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4764 if (IEM_IS_MODRM_REG_MODE(bRm))
4765 {
4766 /*
4767 * Register, register.
4768 */
4769 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4770 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4771
4772 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4773 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4774 if (pVCpu->iem.s.uVexLength == 0)
4775 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4776 IEM_GET_MODRM_REG(pVCpu, bRm));
4777 else
4778 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4779 IEM_GET_MODRM_REG(pVCpu, bRm));
4780 IEM_MC_ADVANCE_RIP_AND_FINISH();
4781 IEM_MC_END();
4782 }
4783 else if (pVCpu->iem.s.uVexLength == 0)
4784 {
4785 /*
4786 * Register, memory128.
4787 */
4788 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4789 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4791
4792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4793 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4794 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4795 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4796
4797 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4798 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4799
4800 IEM_MC_ADVANCE_RIP_AND_FINISH();
4801 IEM_MC_END();
4802 }
4803 else
4804 {
4805 /*
4806 * Register, memory256.
4807 */
4808 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4809 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4811
4812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4813 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4814 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4815 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4816
4817 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4818 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4819
4820 IEM_MC_ADVANCE_RIP_AND_FINISH();
4821 IEM_MC_END();
4822 }
4823}
4824
4825/* Opcode VEX.F2.0F 0x7f - invalid */
4826
4827
4828/* Opcode VEX.0F 0x80 - invalid */
4829/* Opcode VEX.0F 0x81 - invalid */
4830/* Opcode VEX.0F 0x82 - invalid */
4831/* Opcode VEX.0F 0x83 - invalid */
4832/* Opcode VEX.0F 0x84 - invalid */
4833/* Opcode VEX.0F 0x85 - invalid */
4834/* Opcode VEX.0F 0x86 - invalid */
4835/* Opcode VEX.0F 0x87 - invalid */
4836/* Opcode VEX.0F 0x88 - invalid */
4837/* Opcode VEX.0F 0x89 - invalid */
4838/* Opcode VEX.0F 0x8a - invalid */
4839/* Opcode VEX.0F 0x8b - invalid */
4840/* Opcode VEX.0F 0x8c - invalid */
4841/* Opcode VEX.0F 0x8d - invalid */
4842/* Opcode VEX.0F 0x8e - invalid */
4843/* Opcode VEX.0F 0x8f - invalid */
4844/* Opcode VEX.0F 0x90 - invalid */
4845/* Opcode VEX.0F 0x91 - invalid */
4846/* Opcode VEX.0F 0x92 - invalid */
4847/* Opcode VEX.0F 0x93 - invalid */
4848/* Opcode VEX.0F 0x94 - invalid */
4849/* Opcode VEX.0F 0x95 - invalid */
4850/* Opcode VEX.0F 0x96 - invalid */
4851/* Opcode VEX.0F 0x97 - invalid */
4852/* Opcode VEX.0F 0x98 - invalid */
4853/* Opcode VEX.0F 0x99 - invalid */
4854/* Opcode VEX.0F 0x9a - invalid */
4855/* Opcode VEX.0F 0x9b - invalid */
4856/* Opcode VEX.0F 0x9c - invalid */
4857/* Opcode VEX.0F 0x9d - invalid */
4858/* Opcode VEX.0F 0x9e - invalid */
4859/* Opcode VEX.0F 0x9f - invalid */
4860/* Opcode VEX.0F 0xa0 - invalid */
4861/* Opcode VEX.0F 0xa1 - invalid */
4862/* Opcode VEX.0F 0xa2 - invalid */
4863/* Opcode VEX.0F 0xa3 - invalid */
4864/* Opcode VEX.0F 0xa4 - invalid */
4865/* Opcode VEX.0F 0xa5 - invalid */
4866/* Opcode VEX.0F 0xa6 - invalid */
4867/* Opcode VEX.0F 0xa7 - invalid */
4868/* Opcode VEX.0F 0xa8 - invalid */
4869/* Opcode VEX.0F 0xa9 - invalid */
4870/* Opcode VEX.0F 0xaa - invalid */
4871/* Opcode VEX.0F 0xab - invalid */
4872/* Opcode VEX.0F 0xac - invalid */
4873/* Opcode VEX.0F 0xad - invalid */
4874
4875
4876/* Opcode VEX.0F 0xae mem/0 - invalid. */
4877/* Opcode VEX.0F 0xae mem/1 - invalid. */
4878
4879/**
4880 * @ opmaps grp15
4881 * @ opcode !11/2
4882 * @ oppfx none
4883 * @ opcpuid sse
4884 * @ opgroup og_sse_mxcsrsm
4885 * @ opxcpttype 5
4886 * @ optest op1=0 -> mxcsr=0
4887 * @ optest op1=0x2083 -> mxcsr=0x2083
4888 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4889 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4890 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4891 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4892 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4893 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4894 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4895 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4896 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4897 */
4898FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4899//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4900//{
4901// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4902// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4903// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4904// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4905// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4906// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4907// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4908// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4909// IEM_MC_END();
4910// return VINF_SUCCESS;
4911//}
4912
4913
4914/**
4915 * @opmaps vexgrp15
4916 * @opcode !11/3
4917 * @oppfx none
4918 * @opcpuid avx
4919 * @opgroup og_avx_mxcsrsm
4920 * @opxcpttype 5
4921 * @optest mxcsr=0 -> op1=0
4922 * @optest mxcsr=0x2083 -> op1=0x2083
4923 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4924 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4925 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4926 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4927 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4928 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4929 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4930 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4931 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4932 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4933 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4934 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4935 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4936 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4937 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4938 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4939 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4940 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4941 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4942 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4943 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4944 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4945 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4946 * -> value.xcpt=0x6
4947 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4948 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4949 * APMv4 rev 3.17 page 509.
4950 * @todo Test this instruction on AMD Ryzen.
4951 */
4952FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4953{
4954 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4955 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4956 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4958 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4959 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4960 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4961 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4962 IEM_MC_END();
4963}
4964
4965/* Opcode VEX.0F 0xae mem/4 - invalid. */
4966/* Opcode VEX.0F 0xae mem/5 - invalid. */
4967/* Opcode VEX.0F 0xae mem/6 - invalid. */
4968/* Opcode VEX.0F 0xae mem/7 - invalid. */
4969
4970/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4971/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4972/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4973/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4974/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4975/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4976/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4977/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4978
4979/**
4980 * Vex group 15 jump table for memory variant.
4981 */
4982IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4983{ /* pfx: none, 066h, 0f3h, 0f2h */
4984 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4985 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4986 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4987 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4988 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4989 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4990 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4991 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4992};
4993AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4994
4995
4996/** Opcode vex. 0xae. */
4997FNIEMOP_DEF(iemOp_VGrp15)
4998{
4999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5000 if (IEM_IS_MODRM_REG_MODE(bRm))
5001 /* register, register */
5002 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
5003
5004 /* memory, register */
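    /* Handler index: ModR/M reg field (/0../7) times four columns plus the SIMD prefix column (none, 066h, 0F3h, 0F2h). */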
5005 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5006 + pVCpu->iem.s.idxPrefix], bRm);
5007}
5008
5009
5010/* Opcode VEX.0F 0xaf - invalid. */
5011
5012/* Opcode VEX.0F 0xb0 - invalid. */
5013/* Opcode VEX.0F 0xb1 - invalid. */
5014/* Opcode VEX.0F 0xb2 - invalid. */
5016/* Opcode VEX.0F 0xb3 - invalid. */
5017/* Opcode VEX.0F 0xb4 - invalid. */
5018/* Opcode VEX.0F 0xb5 - invalid. */
5019/* Opcode VEX.0F 0xb6 - invalid. */
5020/* Opcode VEX.0F 0xb7 - invalid. */
5021/* Opcode VEX.0F 0xb8 - invalid. */
5022/* Opcode VEX.0F 0xb9 - invalid. */
5023/* Opcode VEX.0F 0xba - invalid. */
5024/* Opcode VEX.0F 0xbb - invalid. */
5025/* Opcode VEX.0F 0xbc - invalid. */
5026/* Opcode VEX.0F 0xbd - invalid. */
5027/* Opcode VEX.0F 0xbe - invalid. */
5028/* Opcode VEX.0F 0xbf - invalid. */
5029
5030/* Opcode VEX.0F 0xc0 - invalid. */
5031/* Opcode VEX.66.0F 0xc0 - invalid. */
5032/* Opcode VEX.F3.0F 0xc0 - invalid. */
5033/* Opcode VEX.F2.0F 0xc0 - invalid. */
5034
5035/* Opcode VEX.0F 0xc1 - invalid. */
5036/* Opcode VEX.66.0F 0xc1 - invalid. */
5037/* Opcode VEX.F3.0F 0xc1 - invalid. */
5038/* Opcode VEX.F2.0F 0xc1 - invalid. */
5039
5040/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
5041FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
5042/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
5043FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
5044/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
5045FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
5046/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
5047FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
5048
5049/* Opcode VEX.0F 0xc3 - invalid */
5050/* Opcode VEX.66.0F 0xc3 - invalid */
5051/* Opcode VEX.F3.0F 0xc3 - invalid */
5052/* Opcode VEX.F2.0F 0xc3 - invalid */
5053
5054/* Opcode VEX.0F 0xc4 - invalid */
5055
5056
5057/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
5058FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
5059{
5060 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
5061 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5062 if (IEM_IS_MODRM_REG_MODE(bRm))
5063 {
5064 /*
5065 * Register, register.
5066 */
5067 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5068 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5069 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5070 IEM_MC_LOCAL(uint16_t, uValue);
5071
5072 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5073 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5074 IEM_MC_PREPARE_AVX_USAGE();
5075
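    /* Copy the VVVV source register to the destination, then overwrite the word selected by
       imm8[2:0] with the low 16 bits of the general register. */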
5076 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5077 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
5078 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5079 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5080 IEM_MC_ADVANCE_RIP_AND_FINISH();
5081 IEM_MC_END();
5082 }
5083 else
5084 {
5085 /*
5086 * Register, memory.
5087 */
5088 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5089 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5090 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5091 IEM_MC_LOCAL(uint16_t, uValue);
5092
5093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5094 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5095 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5096 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5097 IEM_MC_PREPARE_AVX_USAGE();
5098
5099 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5100 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5101 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5102 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5103 IEM_MC_ADVANCE_RIP_AND_FINISH();
5104 IEM_MC_END();
5105 }
5106}
5107
5108
5109/* Opcode VEX.F3.0F 0xc4 - invalid */
5110/* Opcode VEX.F2.0F 0xc4 - invalid */
5111
5112/* Opcode VEX.0F 0xc5 - invalid */
5113
5114
5115/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
5116FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
5117{
5118 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
5119 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5120 if (IEM_IS_MODRM_REG_MODE(bRm))
5121 {
5122 /*
5123 * greg32, XMM, imm8.
5124 */
5125 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5126 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5127 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5128 IEM_MC_LOCAL(uint16_t, uValue);
5129 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5130 IEM_MC_PREPARE_AVX_USAGE();
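    /* Extract the word selected by imm8[2:0] and zero-extend it into the destination general register. */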
5131 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
5132 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
5133 IEM_MC_ADVANCE_RIP_AND_FINISH();
5134 IEM_MC_END();
5135 }
5136 /* No memory operand. */
5137 else
5138 IEMOP_RAISE_INVALID_OPCODE_RET();
5139}
5140
5141
5142/* Opcode VEX.F3.0F 0xc5 - invalid */
5143/* Opcode VEX.F2.0F 0xc5 - invalid */
5144
5145
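/** Common body for vshufps/vshufpd: handles the register,register and register,memory
 *  forms for both 128-bit and 256-bit operands and passes the imm8 shuffle control on
 *  to the selected (host or fallback) AIMPL worker. */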
5146#define VSHUFP_X(a_Instr) \
5147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5148 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5149 { \
5150 /* \
5151 * Register, register. \
5152 */ \
5153 if (pVCpu->iem.s.uVexLength) \
5154 { \
5155 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5156 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5157 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
5158 IEM_MC_LOCAL(RTUINT256U, uDst); \
5159 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
5160 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
5161 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
5162 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
5163 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
5164 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5165 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5166 IEM_MC_PREPARE_AVX_USAGE(); \
5167 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5168 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5169 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
5170 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5171 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5172 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5173 IEM_MC_END(); \
5174 } \
5175 else \
5176 { \
5177 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5178 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5179 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5180 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
5181 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
5182 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
5183 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5184 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5185 IEM_MC_PREPARE_AVX_USAGE(); \
5186 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5187 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5188 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5189 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
5190 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5191 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
5192 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5193 IEM_MC_END(); \
5194 } \
5195 } \
5196 else \
5197 { \
5198 /* \
5199 * Register, memory. \
5200 */ \
5201 if (pVCpu->iem.s.uVexLength) \
5202 { \
5203 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5204 IEM_MC_LOCAL(RTUINT256U, uDst); \
5205 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
5206 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
5207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5208 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
5209 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
5210 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
5211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5212 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5213 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5214 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
5215 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5216 IEM_MC_PREPARE_AVX_USAGE(); \
5217 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5218 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5219 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
5220 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5221 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5222 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5223 IEM_MC_END(); \
5224 } \
5225 else \
5226 { \
5227 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5228 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
5229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5230 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
5231 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
5232 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
5233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5234 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5235 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5236 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5237 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5238 IEM_MC_PREPARE_AVX_USAGE(); \
5239 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5240 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5241 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5242 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
5243 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5244 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
5245 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5246 IEM_MC_END(); \
5247 } \
5248 } \
5249 (void)0
5250
5251/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
5252FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
5253{
5254 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5255 VSHUFP_X(vshufps);
5256}
5257
5258
5259/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
5260FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
5261{
5262 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5263 VSHUFP_X(vshufpd);
5264}
5265#undef VSHUFP_X
5266
5267
5268/* Opcode VEX.F3.0F 0xc6 - invalid */
5269/* Opcode VEX.F2.0F 0xc6 - invalid */
5270
5271/* Opcode VEX.0F 0xc7 - invalid */
5272/* Opcode VEX.66.0F 0xc7 - invalid */
5273/* Opcode VEX.F3.0F 0xc7 - invalid */
5274/* Opcode VEX.F2.0F 0xc7 - invalid */
5275
5276/* Opcode VEX.0F 0xc8 - invalid */
5277/* Opcode VEX.0F 0xc9 - invalid */
5278/* Opcode VEX.0F 0xca - invalid */
5279/* Opcode VEX.0F 0xcb - invalid */
5280/* Opcode VEX.0F 0xcc - invalid */
5281/* Opcode VEX.0F 0xcd - invalid */
5282/* Opcode VEX.0F 0xce - invalid */
5283/* Opcode VEX.0F 0xcf - invalid */
5284
5285
5286/* Opcode VEX.0F 0xd0 - invalid */
5287
5288
5289/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
5290FNIEMOP_DEF(iemOp_vaddsubpd_Vpd_Hpd_Wpd)
5291{
5292 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPD, vaddsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5293 IEMOPMEDIAF3_INIT_VARS( vaddsubpd);
5294 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5295}
5296
5297
5298/* Opcode VEX.F3.0F 0xd0 - invalid */
5299
5300
5301/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
5302FNIEMOP_DEF(iemOp_vaddsubps_Vps_Hps_Wps)
5303{
5304 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPS, vaddsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5305 IEMOPMEDIAF3_INIT_VARS( vaddsubps);
5306 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5307}
5308
5309
5310/* Opcode VEX.0F 0xd1 - invalid */
5311
5312
5313/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
5314FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
5315{
5316 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5317 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
5318 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5319}
5320
5321/* Opcode VEX.F3.0F 0xd1 - invalid */
5322/* Opcode VEX.F2.0F 0xd1 - invalid */
5323
5324/* Opcode VEX.0F 0xd2 - invalid */
5325/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
5326FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
5327{
5328 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5329 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
5330 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5331}
5332
5333/* Opcode VEX.F3.0F 0xd2 - invalid */
5334/* Opcode VEX.F2.0F 0xd2 - invalid */
5335
5336/* Opcode VEX.0F 0xd3 - invalid */
5337/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
5338FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
5339{
5340 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5341 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
5342 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5343}
5344
5345/* Opcode VEX.F3.0F 0xd3 - invalid */
5346/* Opcode VEX.F2.0F 0xd3 - invalid */
5347
5348/* Opcode VEX.0F 0xd4 - invalid */
5349
5350
5351/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
5352FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
5353{
5354 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5355 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
5356 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5357}
5358
5359
5360/* Opcode VEX.F3.0F 0xd4 - invalid */
5361/* Opcode VEX.F2.0F 0xd4 - invalid */
5362
5363/* Opcode VEX.0F 0xd5 - invalid */
5364
5365
5366/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
5367FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
5368{
5369 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5370 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
5371 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5372}
5373
5374
5375/* Opcode VEX.F3.0F 0xd5 - invalid */
5376/* Opcode VEX.F2.0F 0xd5 - invalid */
5377
5378/* Opcode VEX.0F 0xd6 - invalid */
5379
5380/**
5381 * @opcode 0xd6
5382 * @oppfx 0x66
5383 * @opcpuid avx
5384 * @opgroup og_avx_pcksclr_datamove
5385 * @opxcpttype none
5386 * @optest op1=-1 op2=2 -> op1=2
5387 * @optest op1=0 op2=-42 -> op1=-42
5388 */
5389FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
5390{
5391 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5392 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5393 if (IEM_IS_MODRM_REG_MODE(bRm))
5394 {
5395 /*
5396 * Register, register.
5397 */
5398 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5399 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5400
5401 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5402 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5403
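    /* MR form: the ModR/M r/m register is the destination; copy the low quadword from the reg
       operand and zero the remainder of the destination register. */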
5404 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5405 IEM_GET_MODRM_REG(pVCpu, bRm));
5406 IEM_MC_ADVANCE_RIP_AND_FINISH();
5407 IEM_MC_END();
5408 }
5409 else
5410 {
5411 /*
5412 * Memory, register.
5413 */
5414 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5415 IEM_MC_LOCAL(uint64_t, uSrc);
5416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5417
5418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5419 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5420 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5421 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5422
5423 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5424 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5425
5426 IEM_MC_ADVANCE_RIP_AND_FINISH();
5427 IEM_MC_END();
5428 }
5429}
5430
5431/* Opcode VEX.F3.0F 0xd6 - invalid */
5432/* Opcode VEX.F2.0F 0xd6 - invalid */
5433
5434
5435/* Opcode VEX.0F 0xd7 - invalid */
5436
5437/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
5438FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
5439{
5440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5441 /* Docs say register only. */
5442 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
5443 {
5444 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
5445 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
5446 if (pVCpu->iem.s.uVexLength)
5447 {
5448 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5449 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
5450 IEM_MC_ARG(uint64_t *, puDst, 0);
5451 IEM_MC_LOCAL(RTUINT256U, uSrc);
5452 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
5453 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5454 IEM_MC_PREPARE_AVX_USAGE();
5455 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5456 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5457 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
5458 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
5459 IEM_MC_ADVANCE_RIP_AND_FINISH();
5460 IEM_MC_END();
5461 }
5462 else
5463 {
5464 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5465 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5466 IEM_MC_ARG(uint64_t *, puDst, 0);
5467 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5468 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5469 IEM_MC_PREPARE_AVX_USAGE();
5470 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5471 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5472 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
5473 IEM_MC_ADVANCE_RIP_AND_FINISH();
5474 IEM_MC_END();
5475 }
5476 }
5477 else
5478 IEMOP_RAISE_INVALID_OPCODE_RET();
5479}
5480
5481
5482/* Opcode VEX.F3.0F 0xd7 - invalid */
5483/* Opcode VEX.F2.0F 0xd7 - invalid */
5484
5485
5486/* Opcode VEX.0F 0xd8 - invalid */
5487
5488/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5489FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5490{
5491 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5492 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5493 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5494}
5495
5496
5497/* Opcode VEX.F3.0F 0xd8 - invalid */
5498/* Opcode VEX.F2.0F 0xd8 - invalid */
5499
5500/* Opcode VEX.0F 0xd9 - invalid */
5501
5502
5503/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5504FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5505{
5506 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5507 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5508 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5509}
5510
5511
5512/* Opcode VEX.F3.0F 0xd9 - invalid */
5513/* Opcode VEX.F2.0F 0xd9 - invalid */
5514
5515/* Opcode VEX.0F 0xda - invalid */
5516
5517
5518/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
5519FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
5520{
5521 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5522 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
5523 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5524}
5525
5526
5527/* Opcode VEX.F3.0F 0xda - invalid */
5528/* Opcode VEX.F2.0F 0xda - invalid */
5529
5530/* Opcode VEX.0F 0xdb - invalid */
5531
5532
5533/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
5534FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
5535{
5536 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5537 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5538 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
5539}
5540
5541
5542/* Opcode VEX.F3.0F 0xdb - invalid */
5543/* Opcode VEX.F2.0F 0xdb - invalid */
5544
5545/* Opcode VEX.0F 0xdc - invalid */
5546
5547
5548/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
5549FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
5550{
5551 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5552 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
5553 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5554}
5555
5556
5557/* Opcode VEX.F3.0F 0xdc - invalid */
5558/* Opcode VEX.F2.0F 0xdc - invalid */
5559
5560/* Opcode VEX.0F 0xdd - invalid */
5561
5562
5563/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
5564FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
5565{
5566 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5567 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
5568 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5569}
5570
5571
5572/* Opcode VEX.F3.0F 0xdd - invalid */
5573/* Opcode VEX.F2.0F 0xdd - invalid */
5574
5575/* Opcode VEX.0F 0xde - invalid */
5576
5577
5578/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
5579FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
5580{
5581 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5582 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
5583 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5584}
5585
5586
5587/* Opcode VEX.F3.0F 0xde - invalid */
5588/* Opcode VEX.F2.0F 0xde - invalid */
5589
5590/* Opcode VEX.0F 0xdf - invalid */
5591
5592
5593/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5594FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5595{
5596 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5597 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5598 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5599}
5600
5601
5602/* Opcode VEX.F3.0F 0xdf - invalid */
5603/* Opcode VEX.F2.0F 0xdf - invalid */
5604
5605/* Opcode VEX.0F 0xe0 - invalid */
5606
5607
5608/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
5609FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
5610{
5611 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5612 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
5613 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5614}
5615
5616
5617/* Opcode VEX.F3.0F 0xe0 - invalid */
5618/* Opcode VEX.F2.0F 0xe0 - invalid */
5619
5620/* Opcode VEX.0F 0xe1 - invalid */
5621/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
5622FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
5623{
5624 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5625 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
5626 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5627}
5628
5629/* Opcode VEX.F3.0F 0xe1 - invalid */
5630/* Opcode VEX.F2.0F 0xe1 - invalid */
5631
5632/* Opcode VEX.0F 0xe2 - invalid */
5633/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
5634FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
5635{
5636 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5637 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
5638 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5639}
5640
5641/* Opcode VEX.F3.0F 0xe2 - invalid */
5642/* Opcode VEX.F2.0F 0xe2 - invalid */
5643
5644/* Opcode VEX.0F 0xe3 - invalid */
5645
5646
5647/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
5648FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
5649{
5650 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5651 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
5652 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5653}
5654
5655
5656/* Opcode VEX.F3.0F 0xe3 - invalid */
5657/* Opcode VEX.F2.0F 0xe3 - invalid */
5658
5659/* Opcode VEX.0F 0xe4 - invalid */
5660
5661
5662/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
5663FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
5664{
5665 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5666 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
5667 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5668}
5669
5670
5671/* Opcode VEX.F3.0F 0xe4 - invalid */
5672/* Opcode VEX.F2.0F 0xe4 - invalid */
5673
5674/* Opcode VEX.0F 0xe5 - invalid */
5675
5676
5677/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
5678FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
5679{
5680 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5681 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
5682 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5683}
5684
5685
5686/* Opcode VEX.F3.0F 0xe5 - invalid */
5687/* Opcode VEX.F2.0F 0xe5 - invalid */
5688
5689/* Opcode VEX.0F 0xe6 - invalid */
5690/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5691FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5692/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5693FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5694/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5695FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5696
5697
5698/* Opcode VEX.0F 0xe7 - invalid */
5699
5700/**
5701 * @opcode 0xe7
5702 * @opcodesub !11 mr/reg
5703 * @oppfx 0x66
5704 * @opcpuid avx
5705 * @opgroup og_avx_cachect
5706 * @opxcpttype 1
5707 * @optest op1=-1 op2=2 -> op1=2
5708 * @optest op1=0 op2=-42 -> op1=-42
5709 */
5710FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5711{
5712 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5713 Assert(pVCpu->iem.s.uVexLength <= 1);
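    /* The non-temporal hint is not modelled here; this is an ordinary aligned vector store. */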
5714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5715 if (IEM_IS_MODRM_MEM_MODE(bRm))
5716 {
5717 if (pVCpu->iem.s.uVexLength == 0)
5718 {
5719 /*
5720 * 128-bit: Memory, register.
5721 */
5722 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5723 IEM_MC_LOCAL(RTUINT128U, uSrc);
5724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5725
5726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5727 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5728 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5729 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5730
5731 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5732 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5733
5734 IEM_MC_ADVANCE_RIP_AND_FINISH();
5735 IEM_MC_END();
5736 }
5737 else
5738 {
5739 /*
5740 * 256-bit: Memory, register.
5741 */
5742 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5743 IEM_MC_LOCAL(RTUINT256U, uSrc);
5744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5745
5746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5747 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5748 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5749 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5750
5751 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5752 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5753
5754 IEM_MC_ADVANCE_RIP_AND_FINISH();
5755 IEM_MC_END();
5756 }
5757 }
5758 /**
5759 * @opdone
5760 * @opmnemonic udvex660fe7reg
5761 * @opcode 0xe7
5762 * @opcodesub 11 mr/reg
5763 * @oppfx 0x66
5764 * @opunused immediate
5765 * @opcpuid avx
5766 * @optest ->
5767 */
5768 else
5769 IEMOP_RAISE_INVALID_OPCODE_RET();
5770}
5771
5772/* Opcode VEX.F3.0F 0xe7 - invalid */
5773/* Opcode VEX.F2.0F 0xe7 - invalid */
5774
5775
5776/* Opcode VEX.0F 0xe8 - invalid */
5777
5778
5779/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5780FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
5781{
5782 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5783 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
5784 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5785}
5786
5787
5788/* Opcode VEX.F3.0F 0xe8 - invalid */
5789/* Opcode VEX.F2.0F 0xe8 - invalid */
5790
5791/* Opcode VEX.0F 0xe9 - invalid */
5792
5793
5794/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5795FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
5796{
5797 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5798 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
5799 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5800}
5801
5802
5803/* Opcode VEX.F3.0F 0xe9 - invalid */
5804/* Opcode VEX.F2.0F 0xe9 - invalid */
5805
5806/* Opcode VEX.0F 0xea - invalid */
5807
5808
5809/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5810FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5811{
5812 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5813 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
5814 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5815}
5816
5817
5818/* Opcode VEX.F3.0F 0xea - invalid */
5819/* Opcode VEX.F2.0F 0xea - invalid */
5820
5821/* Opcode VEX.0F 0xeb - invalid */
5822
5823
5824/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5825FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5826{
5827 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5828 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5829 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5830}
5831
5832
5833
5834/* Opcode VEX.F3.0F 0xeb - invalid */
5835/* Opcode VEX.F2.0F 0xeb - invalid */
5836
5837/* Opcode VEX.0F 0xec - invalid */
5838
5839
5840/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5841FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
5842{
5843 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5844 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
5845 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5846}
5847
5848
5849/* Opcode VEX.F3.0F 0xec - invalid */
5850/* Opcode VEX.F2.0F 0xec - invalid */
5851
5852/* Opcode VEX.0F 0xed - invalid */
5853
5854
5855/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5856FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
5857{
5858 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5859 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
5860 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5861}
5862
5863
5864/* Opcode VEX.F3.0F 0xed - invalid */
5865/* Opcode VEX.F2.0F 0xed - invalid */
5866
5867/* Opcode VEX.0F 0xee - invalid */
5868
5869
5870/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5871FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5872{
5873 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5874 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
5875 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5876}
5877
5878
5879/* Opcode VEX.F3.0F 0xee - invalid */
5880/* Opcode VEX.F2.0F 0xee - invalid */
5881
5882
5883/* Opcode VEX.0F 0xef - invalid */
5884
5885
5886/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5887FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5888{
5889 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5890 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5891 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5892}
5893
5894
5895/* Opcode VEX.F3.0F 0xef - invalid */
5896/* Opcode VEX.F2.0F 0xef - invalid */
5897
5898/* Opcode VEX.0F 0xf0 - invalid */
5899/* Opcode VEX.66.0F 0xf0 - invalid */
5900
5901
5902/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5903FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5904{
5905 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5906 Assert(pVCpu->iem.s.uVexLength <= 1);
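    /* Implemented as a plain unaligned load: no alignment check, and the destination register is
       zero-extended to the full vector width. */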
5907 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5908 if (IEM_IS_MODRM_REG_MODE(bRm))
5909 {
5910 /*
5911 * Register, register - (not implemented, assuming it raises \#UD).
5912 */
5913 IEMOP_RAISE_INVALID_OPCODE_RET();
5914 }
5915 else if (pVCpu->iem.s.uVexLength == 0)
5916 {
5917 /*
5918 * Register, memory128.
5919 */
5920 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5921 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5923
5924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5925 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5926 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5927 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5928
5929 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5930 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5931
5932 IEM_MC_ADVANCE_RIP_AND_FINISH();
5933 IEM_MC_END();
5934 }
5935 else
5936 {
5937 /*
5938 * Register, memory256.
5939 */
5940 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5941 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5943
5944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5945 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5946 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5947 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5948
5949 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5950 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5951
5952 IEM_MC_ADVANCE_RIP_AND_FINISH();
5953 IEM_MC_END();
5954 }
5955}
5956
5957
5958/* Opcode VEX.0F 0xf1 - invalid */
5959/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
5960FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
5961{
5962 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5963 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
5964 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5965}
5966
5967/* Opcode VEX.F2.0F 0xf1 - invalid */
5968
5969/* Opcode VEX.0F 0xf2 - invalid */
5970/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5971FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
5972{
5973 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5974 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
5975 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5976}
5977/* Opcode VEX.F2.0F 0xf2 - invalid */
5978
5979/* Opcode VEX.0F 0xf3 - invalid */
5980/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5981FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
5982{
5983 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5984 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
5985 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5986}
5987/* Opcode VEX.F2.0F 0xf3 - invalid */
5988
5989/* Opcode VEX.0F 0xf4 - invalid */
5990
5991
5992/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
5993FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5994{
5995 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5996 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5997 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5998}
5999
6000
6001/* Opcode VEX.F2.0F 0xf4 - invalid */
6002
6003/* Opcode VEX.0F 0xf5 - invalid */
6004
6005
6006/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
6007FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
6008{
6009 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6010 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
6011 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6012}
6013
6014
6015/* Opcode VEX.F2.0F 0xf5 - invalid */
6016
6017/* Opcode VEX.0F 0xf6 - invalid */
6018
6019
6020/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
6021FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
6022{
6023 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6024 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
6025 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6026}
6027
6028
6029/* Opcode VEX.F2.0F 0xf6 - invalid */
6030
6031/* Opcode VEX.0F 0xf7 - invalid */
6032
6033
6034/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
6035FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
6036{
6037// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
6038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6039 if (IEM_IS_MODRM_REG_MODE(bRm))
6040 {
6041 /*
6042 * XMM, XMM, (implicit) [E/R]DI
6043 */
6044 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6045 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6046 IEM_MC_LOCAL( uint64_t, u64EffAddr);
6047 IEM_MC_LOCAL( RTUINT128U, u128Mem);
6048 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
6049 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
6050 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
6051 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6052 IEM_MC_PREPARE_AVX_USAGE();
6053
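    /* The store address is taken implicitly from rDI with the current effective segment; the helper
       merges the source into the fetched memory value according to the mask bytes' most significant
       bits before the result is written back. */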
6054 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
6055 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
6056 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6057 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
6058 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
6059 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
6060
6061 IEM_MC_ADVANCE_RIP_AND_FINISH();
6062 IEM_MC_END();
6063 }
6064 else
6065 {
6066 /* The memory, register encoding is invalid. */
6067 IEMOP_RAISE_INVALID_OPCODE_RET();
6068 }
6069}
6070
6071
6072/* Opcode VEX.F2.0F 0xf7 - invalid */
6073
6074/* Opcode VEX.0F 0xf8 - invalid */
6075
6076
6077/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
6078FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
6079{
6080 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6081 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
6082 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6083}
6084
6085
6086/* Opcode VEX.F2.0F 0xf8 - invalid */
6087
6088/* Opcode VEX.0F 0xf9 - invalid */
6089
6090
6091/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
6092FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
6093{
6094 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6095 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
6096 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6097}
6098
6099
6100/* Opcode VEX.F2.0F 0xf9 - invalid */
6101
6102/* Opcode VEX.0F 0xfa - invalid */
6103
6104
6105/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
6106FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
6107{
6108 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6109 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
6110 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6111}
6112
6113
6114/* Opcode VEX.F2.0F 0xfa - invalid */
6115
6116/* Opcode VEX.0F 0xfb - invalid */
6117
6118
6119/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
6120FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
6121{
6122 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6123 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
6124 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6125}
6126
6127
6128/* Opcode VEX.F2.0F 0xfb - invalid */
6129
6130/* Opcode VEX.0F 0xfc - invalid */
6131
6132
6133/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
6134FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
6135{
6136 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6137 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
6138 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6139}
6140
6141
6142/* Opcode VEX.F2.0F 0xfc - invalid */
6143
6144/* Opcode VEX.0F 0xfd - invalid */
6145
6146
6147/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
6148FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
6149{
6150 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6151 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
6152 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6153}
6154
6155
6156/* Opcode VEX.F2.0F 0xfd - invalid */
6157
6158/* Opcode VEX.0F 0xfe - invalid */
6159
6160
6161/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
6162FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
6163{
6164 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6165 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
6166 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6167}
6168
6169
6170/* Opcode VEX.F2.0F 0xfe - invalid */
6171
6172
6173/** Opcode **** 0x0f 0xff - UD0 */
6174FNIEMOP_DEF(iemOp_vud0)
6175{
6176/** @todo testcase: vud0 */
6177 IEMOP_MNEMONIC(vud0, "vud0");
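    /* On Intel CPUs UD0 consumes a ModR/M byte (and any addressing bytes) before raising \#UD,
       whereas AMD CPUs appear not to; hence the vendor check (see the testcase todo above). */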
6178 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
6179 {
6180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
6181 if (IEM_IS_MODRM_MEM_MODE(bRm))
6182 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
6183 }
6184 IEMOP_HLP_DONE_DECODING();
6185 IEMOP_RAISE_INVALID_OPCODE_RET();
6186}
6187
6188
6189
6190/**
6191 * VEX opcode map \#1.
6192 *
6193 * @sa g_apfnTwoByteMap
6194 */
6195const PFNIEMOP g_apfnVexMap1[] =
6196{
6197 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
6198 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
6199 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
6200 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
6201 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
6202 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
6203 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
6204 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
6205 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
6206 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
6207 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
6208 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
6209 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
6210 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
6211 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
6212 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
6213 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
6214
6215 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
6216 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
6217 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
6218 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6219 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6220 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6221 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
6222 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6223 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
6224 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
6225 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
6226 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
6227 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
6228 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
6229 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
6230 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
6231
6232 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
6233 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
6234 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
6235 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
6236 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
6237 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
6238 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
6239 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
6240 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6241 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6242 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
6243 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6244 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
6245 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
6246 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6247 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6248
6249 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
6250 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
6251 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
6252 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
6253 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
6254 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
6255 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
6256 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
6257 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6258 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6259 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6260 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6261 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6262 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6263 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6264 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6265
6266 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
6267 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
6268 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
6269 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
6270 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
6271 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
6272 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
6273 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
6274 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
6275 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
6276 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
6277 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
6278 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
6279 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
6280 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
6281 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
6282
6283 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6284 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
6285 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
6286 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
6287 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6288 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6289 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6290 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6291 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
6292 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
6293 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
6294 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
6295 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
6296 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
6297 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
6298 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
6299
6300 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6301 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6302 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6303 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6304 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6305 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6306 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6307 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6308 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6309 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6310 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6311 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6312 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6313 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6314 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6315 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
6316
6317 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
6318 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6319 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6320 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6321 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6322 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6323 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6324 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6325 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
6326 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
6327 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
6328 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
6329 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
6330 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
6331 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
6332 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
6333
6334 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
6335 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
6336 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
6337 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
6338 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
6339 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
6340 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
6341 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
6342 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
6343 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
6344 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
6345 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
6346 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
6347 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
6348 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
6349 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
6350
6351 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
6352 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
6353 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
6354 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
6355 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
6356 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
6357 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
6358 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
6359 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
6360 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
6361 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
6362 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
6363 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
6364 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
6365 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
6366 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
6367
6368 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
6369 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
6370 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
6371 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
6372 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
6373 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
6374 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
6375 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
6376 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
6377 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
6378 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
6379 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
6380 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
6381 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
6382 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
6383 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
6384
6385 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
6386 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
6387 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
6388 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
6389 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
6390 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
6391 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
6392 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
6393 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
6394 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
6395 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
6396 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
6397 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
6398 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
6399 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
6400 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
6401
6402 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
6403 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
6404 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
6405 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
6406 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
6407 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
6408 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
6409 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
6410 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
6411 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
6412 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
6413 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
6414 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
6415 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
6416 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
6417 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
6418
6419 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
6420 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6421 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6422 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6423 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6424 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6425 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6426 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6427 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6428 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6429 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6430 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6431 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6432 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6433 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6434 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6435
6436 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6437 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6438 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6439 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6440 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6441 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6442 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
6443 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6444 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6445 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6446 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6447 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6448 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6449 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6450 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6451 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6452
6453 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
6454 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6455 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6456 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6457 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6458 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6459 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6460 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6461 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6462 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6463 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6464 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6465 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6466 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6467 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6468 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
6469};
6470AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
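/*
 * Layout note: the table packs four handlers per opcode byte, one per SIMD
 * prefix variant (none, 0x66, 0xF3, 0xF2) -- compare the 0xc2 row, where the
 * columns are vcmpps/vcmppd/vcmpss/vcmpsd -- which is why the AssertCompile
 * above checks for 256 * 4 = 1024 entries.  A minimal sketch of how a handler
 * could be picked from the table, assuming a hypothetical idxSimdPrefix value
 * in the range [0..3] (illustration only, not the decoder path this file is
 * wired into):
 *
 *     uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
 *     return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + idxSimdPrefix]);
 */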
6471/** @} */
6472