/* $Id: IEMAllInstVexMap1.cpp.h 105311 2024-07-12 15:33:03Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx  xmm0, xmm1, xmm2/mem128
 *     - vpxxx  ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_LOCAL(X86YMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
            IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_LOCAL(X86YMMREG, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));

            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
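
/*
 * Illustration: a concrete decoder entry binds an instruction to the worker
 * above through an implementation table, following the same pattern used for
 * vunpcklps further down.  The 'vaddps' table name below is hypothetical:
 *
 *     IEMOPMEDIAF3_INIT_VARS(vaddps);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
 *
 * The worker only assumes that pImpl supplies pfnU128 and pfnU256 entry points.
 */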


/**
 * Common worker for scalar AVX/AVX2 instructions on the forms (addss, subss, etc.):
 *     - vxxxss  xmm0, xmm1, xmm2/mem32
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation.
 * Ignores VEX.L, from SDM:
 *     Software should ensure VADDSS is encoded with VEX.L=0.
 *     Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
 *     across different processor generations.
 */
FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
        IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
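
/*
 * Worked example: for vaddss xmm0, xmm1, xmm2/m32 the worker above hands the
 * callee puDst, puSrc1 = &xmm1 and pr32Src2 pointing at the 32-bit source.
 * Architecturally dst[31:0] = src1[31:0] + src2, dst[127:32] = src1[127:32],
 * and IEM_MC_CLEAR_YREG_128_UP zeroes bits 255:128 of the destination.
 */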


/**
 * Common worker for scalar AVX/AVX2 instructions on the forms (addsd, subsd, etc.):
 *     - vxxxsd  xmm0, xmm1, xmm2/mem64
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation.
 * Ignores VEX.L, from SDM:
 *     Software should ensure VADDSD is encoded with VEX.L=0.
 *     Encoding VADDSD with VEX.L=1 may encounter unpredictable behavior
 *     across different processor generations.
 */
FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R64, PFNIEMAIMPLFPAVXF3U128R64, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTFLOAT64U, pr64Src2, 2);
        IEM_MC_REF_XREG_R64_CONST(pr64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
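
/*
 * Same shape as the R32 worker above, used for vaddsd, vsubsd and friends:
 * dst[63:0] is computed, dst[127:64] is taken from src1, and bits 255:128 of
 * the destination YMM register are zeroed.
 */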


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx  xmm0, xmm1, xmm2/mem128
 *     - vpxxx  ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
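
/*
 * Note: unlike iemOpCommonAvxAvx2_Vx_Hx_Wx above, this worker dispatches via
 * IEM_MC_CALL_VOID_AIMPL_3 and performs no SIMD FP exception check after the
 * call, which suits integer operations (vpand, vpxor and the like) that
 * neither consult nor update MXCSR.
 */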


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx  xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx  ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}
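
/*
 * Worked example: for vpunpckhdq with src1 = {a3,a2,a1,a0} and
 * src2 = {b3,b2,b1,b0} (dwords, highest element first) the result is
 * {b3,a3,b2,a2}; only the high 64 bits of each source contribute, which is
 * why an implementation may skip fetching the low half of a memory operand.
 */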


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx  xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx  ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}
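
/*
 * Worked example: for vpunpckldq the same sources yield {b1,a1,b0,a0}; only
 * the low 64 bits of each operand contribute here.
 */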


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx  xmm0, xmm1/mem128
 *     - vpxxx  ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
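
/*
 * Note: the 256-bit paths above copy the operands through stack locals instead
 * of taking direct register references, presumably because the low and high
 * 128-bit halves of a YMM register are not stored contiguously in the guest
 * register state.
 */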


/**
 * Common worker for AVX/AVX2 instructions on the forms:
 *     - vpxxx  xmm0, xmm1/mem128
 *     - vpxxx  ymm0, ymm1/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
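
/*
 * Note: this two-operand variant dispatches via IEM_MC_CALL_AVX_AIMPL_2 and
 * re-checks for pending SIMD FP exceptions afterwards, fitting floating-point
 * unary operations whose implementation updates the MXCSR status flags.
 */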



/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
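
/*
 * Note: all three paths store with a *_ZX_VLMAX variant, matching the VEX
 * rule that the 128-bit form of vmovups zeroes bits 255:128 of the
 * destination YMM register while the 256-bit form overwrites all of it.
 */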


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x10
         * @oppfx       0xf3
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0 op3=2 -> op1=2
         * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest      op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest      op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note        HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x10
         * @oppfx       0xf3
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovss_Vss_Hss_Wss
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x10
         * @oppfx       0xf2
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0 op3=2 -> op1=2
         * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest      op1=3 op2=-1 op3=0x77 ->
         *              op1=0xffffffffffffffff0000000000000077
         * @optest      op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x10
         * @oppfx       0xf2
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x11
         * @oppfx       0xf3
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0 op3=2 -> op1=2
         * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest      op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest      op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x11
         * @oppfx       0xf3
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovss_Wss_Hss_Vss
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x11
         * @oppfx       0xf2
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0 op3=2 -> op1=2
         * @optest      op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest      op1=3 op2=-1 op3=0x77 ->
         *              op1=0xffffffffffffffff0000000000000077
         * @optest      op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x11
         * @oppfx       0xf2
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  7LZ
         * @optest      op2=0x2200220122022203
         *              op3=0x3304330533063307
         *              -> op1=0x22002201220222033304330533063307
         * @optest      op2=-1 op3=-42 -> op1=-42
         * @note        op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5LZ
         * @opfunction  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest      op1=1 op2=0 op3=0 -> op1=0
         * @optest      op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest      op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest      op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
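
/*
 * Worked example: vmovhlps xmm0, xmm1, xmm2 sets xmm0[63:0] = xmm2[127:64]
 * and xmm0[127:64] = xmm1[127:64]; the memory form vmovlps xmm0, xmm1, m64
 * loads xmm0[63:0] from memory and keeps xmm0[127:64] = xmm1[127:64].  Both
 * zero bits 255:128 of the destination.
 */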


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamerge
 * @opxcpttype  5LZ
 * @optest      op2=0 op3=2 -> op1=2
 * @optest      op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest      op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *              -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  4
 * @optest      vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *              -> op1=0x00000002000000020000000100000001
 * @optest      vex.l==1 /
 *              op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *              -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  5
 * @optest      vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *              -> op1=0x22222222111111112222222211111111
 * @optest      vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *              -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(uint64_t, uSrc1);
            IEM_MC_LOCAL(uint64_t, uSrc2);
            IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);

            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */


/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
1687
1688
1689/* Opcode VEX.F3.0F 0x15 - invalid */
1690/* Opcode VEX.F2.0F 0x15 - invalid */
1691
1692
1693FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1694{
1695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1696 if (IEM_IS_MODRM_REG_MODE(bRm))
1697 {
1698 /**
1699 * @opcode 0x16
1700 * @opcodesub 11 mr/reg
1701 * @oppfx none
1702 * @opcpuid avx
1703 * @opgroup og_avx_simdfp_datamerge
1704 * @opxcpttype 7LZ
1705 */
1706 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1707
1708 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1709 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1710
1711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1712 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1713 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1714 IEM_GET_MODRM_RM(pVCpu, bRm),
1715 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1716
1717 IEM_MC_ADVANCE_RIP_AND_FINISH();
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 /**
1723 * @opdone
1724 * @opcode 0x16
1725 * @opcodesub !11 mr/reg
1726 * @oppfx none
1727 * @opcpuid avx
1728 * @opgroup og_avx_simdfp_datamove
1729 * @opxcpttype 5LZ
1730 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1731 */
1732 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1733
1734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1735 IEM_MC_LOCAL(uint64_t, uSrc);
1736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1737
1738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1739 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1741 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1742
1743 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1744 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1745 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1746 uSrc);
1747
1748 IEM_MC_ADVANCE_RIP_AND_FINISH();
1749 IEM_MC_END();
1750 }
1751}
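
/*
 * Semantics sketch (illustrative only; the helper is made up): the register
 * form (vmovlhps) takes the low qword of each source, while the memory form
 * (vmovhps) loads the high qword from memory instead of from Uq:
 *
 *      static void sketchMovLhps(PRTUINT128U puDst, PCRTUINT128U puSrc1, uint64_t uSrc2)
 *      {
 *          puDst->au64[0] = puSrc1->au64[0];   // Hq low qword
 *          puDst->au64[1] = uSrc2;             // Uq low qword / mem64; YMM bits 255:128 are zeroed
 *      }
 */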
1752
1753
1754/**
1755 * @opcode 0x16
1756 * @opcodesub !11 mr/reg
1757 * @oppfx 0x66
1758 * @opcpuid avx
1759 * @opgroup og_avx_pcksclr_datamerge
1760 * @opxcpttype 5LZ
1761 */
1762FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1763{
1764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1765 if (IEM_IS_MODRM_MEM_MODE(bRm))
1766 {
1767 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1768
1769 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1770 IEM_MC_LOCAL(uint64_t, uSrc);
1771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1772
1773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1774 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1775 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1776 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1777
1778 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1779 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1780 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1781 uSrc);
1782
1783 IEM_MC_ADVANCE_RIP_AND_FINISH();
1784 IEM_MC_END();
1785 }
1786
1787 /**
1788 * @opdone
1789 * @opmnemonic udvex660f16m3
1790 * @opcode 0x16
1791 * @opcodesub 11 mr/reg
1792 * @oppfx 0x66
1793 * @opunused immediate
1794 * @opcpuid avx
1795 * @optest ->
1796 */
1797 else
1798 IEMOP_RAISE_INVALID_OPCODE_RET();
1799}
1800
1801
1802/**
1803 * Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx
1804 * @opcode 0x16
1805 * @oppfx 0xf3
1806 * @opcpuid avx
1807 * @opgroup og_avx_pcksclr_datamove
1808 * @opxcpttype 4
1809 */
1810FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1811{
1812 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1813 Assert(pVCpu->iem.s.uVexLength <= 1);
1814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1815 if (IEM_IS_MODRM_REG_MODE(bRm))
1816 {
1817 /*
1818 * Register, register.
1819 */
1820 if (pVCpu->iem.s.uVexLength == 0)
1821 {
1822 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1823 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1824 IEM_MC_LOCAL(RTUINT128U, uSrc);
1825
1826 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1827 IEM_MC_PREPARE_AVX_USAGE();
1828
1829 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1830 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1831 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1832 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1833 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1834 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1835
1836 IEM_MC_ADVANCE_RIP_AND_FINISH();
1837 IEM_MC_END();
1838 }
1839 else
1840 {
1841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1842 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1844 IEM_MC_PREPARE_AVX_USAGE();
1845
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1848 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1849 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1850 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1851 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1852 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1853 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1854 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1855 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1856 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1857
1858 IEM_MC_ADVANCE_RIP_AND_FINISH();
1859 IEM_MC_END();
1860 }
1861 }
1862 else
1863 {
1864 /*
1865 * Register, memory.
1866 */
1867 if (pVCpu->iem.s.uVexLength == 0)
1868 {
1869 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1870 IEM_MC_LOCAL(RTUINT128U, uSrc);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872
1873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1874 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1875 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1876 IEM_MC_PREPARE_AVX_USAGE();
1877
1878 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1879 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1880 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1881 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1882 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1883 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1884
1885 IEM_MC_ADVANCE_RIP_AND_FINISH();
1886 IEM_MC_END();
1887 }
1888 else
1889 {
1890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1893 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1895 IEM_MC_PREPARE_AVX_USAGE();
1896
1897 IEM_MC_LOCAL(RTUINT256U, uSrc);
1898 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1899
1900 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1901 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1902 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1903 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1904 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1905 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1906 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1907 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1908 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1909
1910 IEM_MC_ADVANCE_RIP_AND_FINISH();
1911 IEM_MC_END();
1912 }
1913 }
1914}
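
/*
 * Semantics sketch (illustrative only; the helper is made up): vmovshdup
 * writes each odd source dword to both dwords of its pair, per 128-bit lane
 * (source indices 5 and 7 serve the upper YMM lane likewise):
 *
 *      static void sketchMovShDupLane(uint32_t auDst[4], uint32_t const auSrc[4])
 *      {
 *          auDst[0] = auDst[1] = auSrc[1];
 *          auDst[2] = auDst[3] = auSrc[3];
 *      }
 */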
1915
1916
1917/* Opcode VEX.F2.0F 0x16 - invalid */
1918
1919
1920/**
1921 * @opcode 0x17
1922 * @opcodesub !11 mr/reg
1923 * @oppfx none
1924 * @opcpuid avx
1925 * @opgroup og_avx_simdfp_datamove
1926 * @opxcpttype 5
1927 */
1928FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1929{
1930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1931 if (IEM_IS_MODRM_MEM_MODE(bRm))
1932 {
1933 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1934
1935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1936 IEM_MC_LOCAL(uint64_t, uSrc);
1937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1938
1939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1940 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1941 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1942 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1943
1944 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1945 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1946
1947 IEM_MC_ADVANCE_RIP_AND_FINISH();
1948 IEM_MC_END();
1949 }
1950
1951 /**
1952 * @opdone
1953 * @opmnemonic udvex0f17m3
1954 * @opcode 0x17
1955 * @opcodesub 11 mr/reg
1956 * @oppfx none
1957 * @opunused immediate
1958 * @opcpuid avx
1959 * @optest ->
1960 */
1961 else
1962 IEMOP_RAISE_INVALID_OPCODE_RET();
1963}
1964
1965
1966/**
1967 * @opcode 0x17
1968 * @opcodesub !11 mr/reg
1969 * @oppfx 0x66
1970 * @opcpuid avx
1971 * @opgroup og_avx_pcksclr_datamove
1972 * @opxcpttype 5
1973 */
1974FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1975{
1976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1977 if (IEM_IS_MODRM_MEM_MODE(bRm))
1978 {
1979 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1980
1981 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1982 IEM_MC_LOCAL(uint64_t, uSrc);
1983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1984
1985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1986 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1988 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1989
1990 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1991 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1992
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 }
1996
1997 /**
1998 * @opdone
1999 * @opmnemonic udvex660f17m3
2000 * @opcode 0x17
2001 * @opcodesub 11 mr/reg
2002 * @oppfx 0x66
2003 * @opunused immediate
2004 * @opcpuid avx
2005 * @optest ->
2006 */
2007 else
2008 IEMOP_RAISE_INVALID_OPCODE_RET();
2009}
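
/*
 * Semantics sketch (illustrative only; the helper is made up): the 0x17
 * store forms are the high-qword counterparts of vmovlps/vmovlpd:
 *
 *      static void sketchStoreHighQword(uint64_t *puMem, PCRTUINT128U puSrc)
 *      {
 *          *puMem = puSrc->au64[1];    // bits 127:64
 *      }
 */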
2010
2011
2012/* Opcode VEX.F3.0F 0x17 - invalid */
2013/* Opcode VEX.F2.0F 0x17 - invalid */
2014
2015
2016/* Opcode VEX.0F 0x18 - invalid */
2017/* Opcode VEX.0F 0x19 - invalid */
2018/* Opcode VEX.0F 0x1a - invalid */
2019/* Opcode VEX.0F 0x1b - invalid */
2020/* Opcode VEX.0F 0x1c - invalid */
2021/* Opcode VEX.0F 0x1d - invalid */
2022/* Opcode VEX.0F 0x1e - invalid */
2023/* Opcode VEX.0F 0x1f - invalid */
2024
2025/* Opcode VEX.0F 0x20 - invalid */
2026/* Opcode VEX.0F 0x21 - invalid */
2027/* Opcode VEX.0F 0x22 - invalid */
2028/* Opcode VEX.0F 0x23 - invalid */
2029/* Opcode VEX.0F 0x24 - invalid */
2030/* Opcode VEX.0F 0x25 - invalid */
2031/* Opcode VEX.0F 0x26 - invalid */
2032/* Opcode VEX.0F 0x27 - invalid */
2033
2034/**
2035 * @opcode 0x28
2036 * @oppfx none
2037 * @opcpuid avx
2038 * @opgroup og_avx_pcksclr_datamove
2039 * @opxcpttype 1
2040 * @optest op1=1 op2=2 -> op1=2
2041 * @optest op1=0 op2=-42 -> op1=-42
2042 * @note Almost identical to vmovapd.
2043 */
2044FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2045{
2046 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2048 Assert(pVCpu->iem.s.uVexLength <= 1);
2049 if (IEM_IS_MODRM_REG_MODE(bRm))
2050 {
2051 /*
2052 * Register, register.
2053 */
2054 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2055 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2056
2057 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2058 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2059 if (pVCpu->iem.s.uVexLength == 0)
2060 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2061 IEM_GET_MODRM_RM(pVCpu, bRm));
2062 else
2063 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2064 IEM_GET_MODRM_RM(pVCpu, bRm));
2065 IEM_MC_ADVANCE_RIP_AND_FINISH();
2066 IEM_MC_END();
2067 }
2068 else
2069 {
2070 /*
2071 * Register, memory.
2072 */
2073 if (pVCpu->iem.s.uVexLength == 0)
2074 {
2075 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_LOCAL(RTUINT128U, uSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2083
2084 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090 else
2091 {
2092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2094 IEM_MC_LOCAL(RTUINT256U, uSrc);
2095
2096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2097 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2100
2101 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2102 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2103
2104 IEM_MC_ADVANCE_RIP_AND_FINISH();
2105 IEM_MC_END();
2106 }
2107 }
2108}
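
/*
 * Note on the aligned fetch (illustrative only; the helper is made up):
 * unlike vmovups, vmovaps/vmovapd fault if the effective address is not
 * naturally aligned to the operand size, roughly:
 *
 *      static bool sketchIsAligned(uint64_t GCPtrMem, unsigned cbOperand)
 *      {
 *          return (GCPtrMem & (cbOperand - 1)) == 0;   // cbOperand: 16 for XMM, 32 for YMM
 *      }
 */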
2109
2110
2111/**
2112 * @opcode 0x28
2113 * @oppfx 0x66
2114 * @opcpuid avx
2115 * @opgroup og_avx_pcksclr_datamove
2116 * @opxcpttype 1
2117 * @optest op1=1 op2=2 -> op1=2
2118 * @optest op1=0 op2=-42 -> op1=-42
2119 * @note Almost identical to vmovaps.
2120 */
2121FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2122{
2123 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2125 Assert(pVCpu->iem.s.uVexLength <= 1);
2126 if (IEM_IS_MODRM_REG_MODE(bRm))
2127 {
2128 /*
2129 * Register, register.
2130 */
2131 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2132 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2133
2134 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2136 if (pVCpu->iem.s.uVexLength == 0)
2137 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2138 IEM_GET_MODRM_RM(pVCpu, bRm));
2139 else
2140 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2141 IEM_GET_MODRM_RM(pVCpu, bRm));
2142 IEM_MC_ADVANCE_RIP_AND_FINISH();
2143 IEM_MC_END();
2144 }
2145 else
2146 {
2147 /*
2148 * Register, memory.
2149 */
2150 if (pVCpu->iem.s.uVexLength == 0)
2151 {
2152 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2154 IEM_MC_LOCAL(RTUINT128U, uSrc);
2155
2156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2157 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2158 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2160
2161 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2162 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2163
2164 IEM_MC_ADVANCE_RIP_AND_FINISH();
2165 IEM_MC_END();
2166 }
2167 else
2168 {
2169 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171 IEM_MC_LOCAL(RTUINT256U, uSrc);
2172
2173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2174 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2175 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2176 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2177
2178 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2179 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 }
2185}
2186
2187/**
2188 * @opmnemonic udvexf30f28
2189 * @opcode 0x28
2190 * @oppfx 0xf3
2191 * @opunused vex.modrm
2192 * @opcpuid avx
2193 * @optest ->
2194 * @opdone
2195 */
2196
2197/**
2198 * @opmnemonic udvexf20f28
2199 * @opcode 0x28
2200 * @oppfx 0xf2
2201 * @opunused vex.modrm
2202 * @opcpuid avx
2203 * @optest ->
2204 * @opdone
2205 */
2206
2207/**
2208 * @opcode 0x29
2209 * @oppfx none
2210 * @opcpuid avx
2211 * @opgroup og_avx_pcksclr_datamove
2212 * @opxcpttype 1
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 * @note Almost identical to vmovapd.
2216 */
2217FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2218{
2219 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 if (IEM_IS_MODRM_REG_MODE(bRm))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2228 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2229
2230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2232 if (pVCpu->iem.s.uVexLength == 0)
2233 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2234 IEM_GET_MODRM_REG(pVCpu, bRm));
2235 else
2236 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2237 IEM_GET_MODRM_REG(pVCpu, bRm));
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 }
2241 else
2242 {
2243 /*
2244 * Register, memory.
2245 */
2246 if (pVCpu->iem.s.uVexLength == 0)
2247 {
2248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250 IEM_MC_LOCAL(RTUINT128U, uSrc);
2251
2252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2253 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2256
2257 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2258 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2259
2260 IEM_MC_ADVANCE_RIP_AND_FINISH();
2261 IEM_MC_END();
2262 }
2263 else
2264 {
2265 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2267 IEM_MC_LOCAL(RTUINT256U, uSrc);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2270 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2271 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2273
2274 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2275 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2276
2277 IEM_MC_ADVANCE_RIP_AND_FINISH();
2278 IEM_MC_END();
2279 }
2280 }
2281}
2282
2283/**
2284 * @opcode 0x29
2285 * @oppfx 0x66
2286 * @opcpuid avx
2287 * @opgroup og_avx_pcksclr_datamove
2288 * @opxcpttype 1
2289 * @optest op1=1 op2=2 -> op1=2
2290 * @optest op1=0 op2=-42 -> op1=-42
2291 * @note Almost identical to vmovaps.
2292 */
2293FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2294{
2295 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2296 Assert(pVCpu->iem.s.uVexLength <= 1);
2297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2298 if (IEM_IS_MODRM_REG_MODE(bRm))
2299 {
2300 /*
2301 * Register, register.
2302 */
2303 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2305
2306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2307 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2308 if (pVCpu->iem.s.uVexLength == 0)
2309 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2310 IEM_GET_MODRM_REG(pVCpu, bRm));
2311 else
2312 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2313 IEM_GET_MODRM_REG(pVCpu, bRm));
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 if (pVCpu->iem.s.uVexLength == 0)
2323 {
2324 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2326 IEM_MC_LOCAL(RTUINT128U, uSrc);
2327
2328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2332
2333 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2334 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2335
2336 IEM_MC_ADVANCE_RIP_AND_FINISH();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343 IEM_MC_LOCAL(RTUINT256U, uSrc);
2344
2345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2346 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2349
2350 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2351 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2352
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 }
2356 }
2357}
2358
2359
2360/**
2361 * @opmnemonic udvexf30f29
2362 * @opcode 0x29
2363 * @oppfx 0xf3
2364 * @opunused vex.modrm
2365 * @opcpuid avx
2366 * @optest ->
2367 * @opdone
2368 */
2369
2370/**
2371 * @opmnemonic udvexf20f29
2372 * @opcode 0x29
2373 * @oppfx 0xf2
2374 * @opunused vex.modrm
2375 * @opcpuid avx
2376 * @optest ->
2377 * @opdone
2378 */
2379
2380
2381/* Opcode VEX.0F 0x2a - invalid */
2382/* Opcode VEX.66.0F 0x2a - invalid */
2383
2384
2385/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2386FNIEMOP_DEF(iemOp_vcvtsi2ss_Vss_Hss_Ey)
2387{
2388 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SS, vcvtsi2ss, Vps, Hps, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2389 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2391 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2392 {
2393 if (IEM_IS_MODRM_REG_MODE(bRm))
2394 {
2395 /* XMM, greg64 */
2396 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2397 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2398 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2399 IEM_MC_PREPARE_AVX_USAGE();
2400
2401 IEM_MC_LOCAL(X86XMMREG, uDst);
2402 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2403 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2404 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2405 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2406 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2407 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2408 puDst, puSrc1, pi64Src2);
2409 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2410 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2411 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2412 IEM_MC_ADVANCE_RIP_AND_FINISH();
2413 IEM_MC_END();
2414 }
2415 else
2416 {
2417 /* XMM, [mem64] */
2418 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2421 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2422 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2423 IEM_MC_PREPARE_AVX_USAGE();
2424
2425 IEM_MC_LOCAL(X86XMMREG, uDst);
2426 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2427 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2428 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2429 IEM_MC_LOCAL(int64_t, i64Src2);
2430 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2431 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2432 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2433 puDst, puSrc1, pi64Src2);
2434 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2435 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2436 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2437 IEM_MC_ADVANCE_RIP_AND_FINISH();
2438 IEM_MC_END();
2439 }
2440 }
2441 else
2442 {
2443 if (IEM_IS_MODRM_REG_MODE(bRm))
2444 {
2445 /* XMM, greg32 */
2446 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2447 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2448 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2449 IEM_MC_PREPARE_AVX_USAGE();
2450
2451 IEM_MC_LOCAL(X86XMMREG, uDst);
2452 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2453 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2454 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2455 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2456 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2457 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2458 puDst, puSrc1, pi32Src2);
2459 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2460 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2461 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2462 IEM_MC_ADVANCE_RIP_AND_FINISH();
2463 IEM_MC_END();
2464 }
2465 else
2466 {
2467 /* XMM, [mem32] */
2468 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2472 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2473 IEM_MC_PREPARE_AVX_USAGE();
2474
2475 IEM_MC_LOCAL(X86XMMREG, uDst);
2476 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2477 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2478 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2479 IEM_MC_LOCAL(int32_t, i32Src2);
2480 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2481 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2482 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2483 puDst, puSrc1, pi32Src2);
2484 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2485 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2486 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2487 IEM_MC_ADVANCE_RIP_AND_FINISH();
2488 IEM_MC_END();
2489 }
2490 }
2491}
2492
2493
2494/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2495FNIEMOP_DEF(iemOp_vcvtsi2sd_Vsd_Hsd_Ey)
2496{
2497 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SD, vcvtsi2sd, Vpd, Hpd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2498 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2501 {
2502 if (IEM_IS_MODRM_REG_MODE(bRm))
2503 {
2504 /* XMM, greg64 */
2505 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2506 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2507 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2508 IEM_MC_PREPARE_AVX_USAGE();
2509
2510 IEM_MC_LOCAL(X86XMMREG, uDst);
2511 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2512 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2513 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2514 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2515 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2516 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2517 puDst, puSrc1, pi64Src2);
2518 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2519 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2520 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2521 IEM_MC_ADVANCE_RIP_AND_FINISH();
2522 IEM_MC_END();
2523 }
2524 else
2525 {
2526 /* XMM, [mem64] */
2527 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2530 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2532 IEM_MC_PREPARE_AVX_USAGE();
2533
2534 IEM_MC_LOCAL(X86XMMREG, uDst);
2535 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2536 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2537 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2538 IEM_MC_LOCAL(int64_t, i64Src2);
2539 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2540 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2541 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2542 puDst, puSrc1, pi64Src2);
2543 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2544 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2545 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2546 IEM_MC_ADVANCE_RIP_AND_FINISH();
2547 IEM_MC_END();
2548 }
2549 }
2550 else
2551 {
2552 if (IEM_IS_MODRM_REG_MODE(bRm))
2553 {
2554 /* XMM, greg32 */
2555 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2556 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2558 IEM_MC_PREPARE_AVX_USAGE();
2559
2560 IEM_MC_LOCAL(X86XMMREG, uDst);
2561 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2562 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2563 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2564 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2565 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2566 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2567 puDst, puSrc1, pi32Src2);
2568 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2569 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2570 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2571 IEM_MC_ADVANCE_RIP_AND_FINISH();
2572 IEM_MC_END();
2573 }
2574 else
2575 {
2576 /* XMM, [mem32] */
2577 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2580 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2581 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2582 IEM_MC_PREPARE_AVX_USAGE();
2583
2584 IEM_MC_LOCAL(X86XMMREG, uDst);
2585 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2586 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2587 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2588 IEM_MC_LOCAL(int32_t, i32Src2);
2589 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2590 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2591 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2592 puDst, puSrc1, pi32Src2);
2593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2594 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2595 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2596 IEM_MC_ADVANCE_RIP_AND_FINISH();
2597 IEM_MC_END();
2598 }
2599 }
2600}
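
/*
 * Semantics sketch (illustrative only; the type and helper are made up):
 * vcvtsi2sd converts the integer source and writes only the low scalar,
 * keeping bits 127:64 from the first XMM source and zeroing the upper YMM
 * half; vcvtsi2ss is analogous with a float in the low dword and bits
 * 127:32 kept. MXCSR rounding is ignored in this simplified model:
 *
 *      typedef union { double ard[2]; uint64_t au64[2]; } XMMSKETCH;
 *      static void sketchCvtSi2Sd(XMMSKETCH *puDst, XMMSKETCH const *puSrc1, int64_t iSrc2)
 *      {
 *          puDst->au64[1] = puSrc1->au64[1];   // bits 127:64 from SRC1
 *          puDst->ard[0]  = (double)iSrc2;     // converted scalar into bits 63:0
 *      }
 */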
2601
2602
2603/**
2604 * @opcode 0x2b
2605 * @opcodesub !11 mr/reg
2606 * @oppfx none
2607 * @opcpuid avx
2608 * @opgroup og_avx_cachect
2609 * @opxcpttype 1
2610 * @optest op1=1 op2=2 -> op1=2
2611 * @optest op1=0 op2=-42 -> op1=-42
2612 * @note Identical implementation to vmovntpd.
2613 */
2614FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2615{
2616 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2617 Assert(pVCpu->iem.s.uVexLength <= 1);
2618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2619 if (IEM_IS_MODRM_MEM_MODE(bRm))
2620 {
2621 /*
2622 * Memory, register.
2623 */
2624 if (pVCpu->iem.s.uVexLength == 0)
2625 {
2626 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2627 IEM_MC_LOCAL(RTUINT128U, uSrc);
2628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2629
2630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2631 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2632 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2633 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2634
2635 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2636 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2637
2638 IEM_MC_ADVANCE_RIP_AND_FINISH();
2639 IEM_MC_END();
2640 }
2641 else
2642 {
2643 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2644 IEM_MC_LOCAL(RTUINT256U, uSrc);
2645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2646
2647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2648 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2649 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2650 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2651
2652 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2653 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658 }
2659 /* The register, register encoding is invalid. */
2660 else
2661 IEMOP_RAISE_INVALID_OPCODE_RET();
2662}
2663
2664/**
2665 * @opcode 0x2b
2666 * @opcodesub !11 mr/reg
2667 * @oppfx 0x66
2668 * @opcpuid avx
2669 * @opgroup og_avx_cachect
2670 * @opxcpttype 1
2671 * @optest op1=1 op2=2 -> op1=2
2672 * @optest op1=0 op2=-42 -> op1=-42
2673 * @note Identical implementation to vmovntps.
2674 */
2675FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2676{
2677 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2678 Assert(pVCpu->iem.s.uVexLength <= 1);
2679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2680 if (IEM_IS_MODRM_MEM_MODE(bRm))
2681 {
2682 /*
2683 * memory, register.
2684 */
2685 if (pVCpu->iem.s.uVexLength == 0)
2686 {
2687 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2688 IEM_MC_LOCAL(RTUINT128U, uSrc);
2689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2690
2691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2692 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2694 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2695
2696 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2697 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2698
2699 IEM_MC_ADVANCE_RIP_AND_FINISH();
2700 IEM_MC_END();
2701 }
2702 else
2703 {
2704 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2705 IEM_MC_LOCAL(RTUINT256U, uSrc);
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2707
2708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2709 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2710 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2712
2713 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2714 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2715
2716 IEM_MC_ADVANCE_RIP_AND_FINISH();
2717 IEM_MC_END();
2718 }
2719 }
2720 /* The register, register encoding is invalid. */
2721 else
2722 IEMOP_RAISE_INVALID_OPCODE_RET();
2723}
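
/*
 * Note (illustrative): apart from the non-temporal cache hint, the vmovnt*
 * stores behave like aligned vmovaps/vmovapd stores, which is why the same
 * aligned store path can be used here. In compiler terms the 128-bit ps
 * form corresponds to the SSE intrinsic:
 *
 *      #include <xmmintrin.h>
 *      _mm_stream_ps(pfDst, xmmSrc);   // pfDst must be 16-byte aligned
 */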
2724
2725/**
2726 * @opmnemonic udvexf30f2b
2727 * @opcode 0x2b
2728 * @oppfx 0xf3
2729 * @opunused vex.modrm
2730 * @opcpuid avx
2731 * @optest ->
2732 * @opdone
2733 */
2734
2735/**
2736 * @opmnemonic udvexf20f2b
2737 * @opcode 0x2b
2738 * @oppfx 0xf2
2739 * @opunused vex.modrm
2740 * @opcpuid avx
2741 * @optest ->
2742 * @opdone
2743 */
2744
2745
2746/* Opcode VEX.0F 0x2c - invalid */
2747/* Opcode VEX.66.0F 0x2c - invalid */
2748/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2749FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2750/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2751FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2752
2753/* Opcode VEX.0F 0x2d - invalid */
2754/* Opcode VEX.66.0F 0x2d - invalid */
2755/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2756FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2757/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2758FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
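
/*
 * Note (illustrative): the 0x2c forms truncate toward zero, which is what a
 * plain C cast does, while the 0x2d forms round according to MXCSR.RC:
 *
 *      int32_t i32Dst = (int32_t)r32Src;   // vcvttss2si-style truncation
 */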
2759
2760
2761/**
2762 * @opcode 0x2e
2763 * @oppfx none
2764 * @opflmodify cf,pf,af,zf,sf,of
2765 * @opflclear af,sf,of
2766 */
2767FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2768{
2769 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2771 if (IEM_IS_MODRM_REG_MODE(bRm))
2772 {
2773 /*
2774 * Register, register.
2775 */
2776 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2777 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2778 IEM_MC_LOCAL(uint32_t, fEFlags);
2779 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2780 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2781 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2782 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2783 IEM_MC_PREPARE_AVX_USAGE();
2784 IEM_MC_FETCH_EFLAGS(fEFlags);
2785 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2786 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2787 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2788 pEFlags, uSrc1, uSrc2);
2789 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2790 IEM_MC_COMMIT_EFLAGS(fEFlags);
2791
2792 IEM_MC_ADVANCE_RIP_AND_FINISH();
2793 IEM_MC_END();
2794 }
2795 else
2796 {
2797 /*
2798 * Register, memory.
2799 */
2800 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2801 IEM_MC_LOCAL(uint32_t, fEFlags);
2802 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2803 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2804 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2805 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2806
2807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2808 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2809 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2810 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2811
2812 IEM_MC_PREPARE_AVX_USAGE();
2813 IEM_MC_FETCH_EFLAGS(fEFlags);
2814 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2815 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2816 pEFlags, uSrc1, uSrc2);
2817 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2818 IEM_MC_COMMIT_EFLAGS(fEFlags);
2819
2820 IEM_MC_ADVANCE_RIP_AND_FINISH();
2821 IEM_MC_END();
2822 }
2823}
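
/*
 * Semantics sketch (illustrative only; the helper is made up): the compare
 * maps the scalar relation onto ZF/PF/CF and clears OF/AF/SF; the 0x2f
 * forms (vcomiss/vcomisd) compute the same flags and differ only in
 * signalling exceptions on quiet NaNs:
 *
 *      static uint32_t sketchUComIss(float r32Src1, float r32Src2)
 *      {
 *          if (r32Src1 != r32Src1 || r32Src2 != r32Src2)   // NaN -> unordered
 *              return X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
 *          if (r32Src1 == r32Src2)
 *              return X86_EFL_ZF;
 *          if (r32Src1 <  r32Src2)
 *              return X86_EFL_CF;
 *          return 0;                                       // greater-than
 *      }
 */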
2824
2825
2826/**
2827 * @opcode 0x2e
2828 * @oppfx 0x66
2829 * @opflmodify cf,pf,af,zf,sf,of
2830 * @opflclear af,sf,of
2831 */
2832FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2833{
2834 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2836 if (IEM_IS_MODRM_REG_MODE(bRm))
2837 {
2838 /*
2839 * Register, register.
2840 */
2841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2842 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2843 IEM_MC_LOCAL(uint32_t, fEFlags);
2844 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2845 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2846 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2847 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2848 IEM_MC_PREPARE_AVX_USAGE();
2849 IEM_MC_FETCH_EFLAGS(fEFlags);
2850 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2851 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2852 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2853 pEFlags, uSrc1, uSrc2);
2854 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2855 IEM_MC_COMMIT_EFLAGS(fEFlags);
2856
2857 IEM_MC_ADVANCE_RIP_AND_FINISH();
2858 IEM_MC_END();
2859 }
2860 else
2861 {
2862 /*
2863 * Register, memory.
2864 */
2865 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2866 IEM_MC_LOCAL(uint32_t, fEFlags);
2867 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2868 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2869 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2871
2872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2873 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2874 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2875 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2876
2877 IEM_MC_PREPARE_AVX_USAGE();
2878 IEM_MC_FETCH_EFLAGS(fEFlags);
2879 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2880 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2881 pEFlags, uSrc1, uSrc2);
2882 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2883 IEM_MC_COMMIT_EFLAGS(fEFlags);
2884
2885 IEM_MC_ADVANCE_RIP_AND_FINISH();
2886 IEM_MC_END();
2887 }
2888}
2889
2890
2891/* Opcode VEX.F3.0F 0x2e - invalid */
2892/* Opcode VEX.F2.0F 0x2e - invalid */
2893
2894/**
2895 * @opcode 0x2f
2896 * @oppfx none
2897 * @opflmodify cf,pf,af,zf,sf,of
2898 * @opflclear af,sf,of
2899 */
2900FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2901{
2902 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2903 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2904 if (IEM_IS_MODRM_REG_MODE(bRm))
2905 {
2906 /*
2907 * Register, register.
2908 */
2909 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2910 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2911 IEM_MC_LOCAL(uint32_t, fEFlags);
2912 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2913 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2914 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2915 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2916 IEM_MC_PREPARE_AVX_USAGE();
2917 IEM_MC_FETCH_EFLAGS(fEFlags);
2918 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2919 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2920 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2921 pEFlags, uSrc1, uSrc2);
2922 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2923 IEM_MC_COMMIT_EFLAGS(fEFlags);
2924
2925 IEM_MC_ADVANCE_RIP_AND_FINISH();
2926 IEM_MC_END();
2927 }
2928 else
2929 {
2930 /*
2931 * Register, memory.
2932 */
2933 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2934 IEM_MC_LOCAL(uint32_t, fEFlags);
2935 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2936 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2937 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2938 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2939
2940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2941 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2942 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2943 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2944
2945 IEM_MC_PREPARE_AVX_USAGE();
2946 IEM_MC_FETCH_EFLAGS(fEFlags);
2947 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2948 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2949 pEFlags, uSrc1, uSrc2);
2950 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2951 IEM_MC_COMMIT_EFLAGS(fEFlags);
2952
2953 IEM_MC_ADVANCE_RIP_AND_FINISH();
2954 IEM_MC_END();
2955 }
2956}
2957
2958
2959/**
2960 * @opcode 0x2f
2961 * @oppfx 0x66
2962 * @opflmodify cf,pf,af,zf,sf,of
2963 * @opflclear af,sf,of
2964 */
2965FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2966{
2967 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2969 if (IEM_IS_MODRM_REG_MODE(bRm))
2970 {
2971 /*
2972 * Register, register.
2973 */
2974 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2975 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2976 IEM_MC_LOCAL(uint32_t, fEFlags);
2977 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2978 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2979 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2980 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2981 IEM_MC_PREPARE_AVX_USAGE();
2982 IEM_MC_FETCH_EFLAGS(fEFlags);
2983 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2984 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2985 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2986 pEFlags, uSrc1, uSrc2);
2987 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2988 IEM_MC_COMMIT_EFLAGS(fEFlags);
2989
2990 IEM_MC_ADVANCE_RIP_AND_FINISH();
2991 IEM_MC_END();
2992 }
2993 else
2994 {
2995 /*
2996 * Register, memory.
2997 */
2998 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2999 IEM_MC_LOCAL(uint32_t, fEFlags);
3000 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3001 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3002 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3004
3005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3006 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3007 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3008 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3009
3010 IEM_MC_PREPARE_AVX_USAGE();
3011 IEM_MC_FETCH_EFLAGS(fEFlags);
3012 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3013 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3014 pEFlags, uSrc1, uSrc2);
3015 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3016 IEM_MC_COMMIT_EFLAGS(fEFlags);
3017
3018 IEM_MC_ADVANCE_RIP_AND_FINISH();
3019 IEM_MC_END();
3020 }
3021}
3022
3023
3024/* Opcode VEX.F3.0F 0x2f - invalid */
3025/* Opcode VEX.F2.0F 0x2f - invalid */
3026
3027/* Opcode VEX.0F 0x30 - invalid */
3028/* Opcode VEX.0F 0x31 - invalid */
3029/* Opcode VEX.0F 0x32 - invalid */
3030/* Opcode VEX.0F 0x33 - invalid */
3031/* Opcode VEX.0F 0x34 - invalid */
3032/* Opcode VEX.0F 0x35 - invalid */
3033/* Opcode VEX.0F 0x36 - invalid */
3034/* Opcode VEX.0F 0x37 - invalid */
3035/* Opcode VEX.0F 0x38 - invalid */
3036/* Opcode VEX.0F 0x39 - invalid */
3037/* Opcode VEX.0F 0x3a - invalid */
3038/* Opcode VEX.0F 0x3b - invalid */
3039/* Opcode VEX.0F 0x3c - invalid */
3040/* Opcode VEX.0F 0x3d - invalid */
3041/* Opcode VEX.0F 0x3e - invalid */
3042/* Opcode VEX.0F 0x3f - invalid */
3043/* Opcode VEX.0F 0x40 - invalid */
3044/* Opcode VEX.0F 0x41 - invalid */
3045/* Opcode VEX.0F 0x42 - invalid */
3046/* Opcode VEX.0F 0x43 - invalid */
3047/* Opcode VEX.0F 0x44 - invalid */
3048/* Opcode VEX.0F 0x45 - invalid */
3049/* Opcode VEX.0F 0x46 - invalid */
3050/* Opcode VEX.0F 0x47 - invalid */
3051/* Opcode VEX.0F 0x48 - invalid */
3052/* Opcode VEX.0F 0x49 - invalid */
3053/* Opcode VEX.0F 0x4a - invalid */
3054/* Opcode VEX.0F 0x4b - invalid */
3055/* Opcode VEX.0F 0x4c - invalid */
3056/* Opcode VEX.0F 0x4d - invalid */
3057/* Opcode VEX.0F 0x4e - invalid */
3058/* Opcode VEX.0F 0x4f - invalid */
3059
3060
3061/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
3062FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
3063{
3064 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3066 if (IEM_IS_MODRM_REG_MODE(bRm))
3067 {
3068 /*
3069 * Register, register.
3070 */
3071 if (pVCpu->iem.s.uVexLength == 0)
3072 {
3073 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3074 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3075 IEM_MC_LOCAL(uint8_t, u8Dst);
3076 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3077 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3078 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3079 IEM_MC_PREPARE_AVX_USAGE();
3080 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3081 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
3082 pu8Dst, puSrc);
3083 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3084 IEM_MC_ADVANCE_RIP_AND_FINISH();
3085 IEM_MC_END();
3086 }
3087 else
3088 {
3089 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3090 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3091 IEM_MC_LOCAL(uint8_t, u8Dst);
3092 IEM_MC_LOCAL(RTUINT256U, uSrc);
3093 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3094 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3095
3096 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3097 IEM_MC_PREPARE_AVX_USAGE();
3098 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3099 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
3100 pu8Dst, puSrc);
3101 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3102 IEM_MC_ADVANCE_RIP_AND_FINISH();
3103 IEM_MC_END();
3104 }
3105 }
3106 /* No memory operand. */
3107 else
3108 IEMOP_RAISE_INVALID_OPCODE_RET();
3109}
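
/*
 * Semantics sketch (illustrative only; the helper is made up): vmovmskps
 * gathers the sign bit of each packed single into the low bits of the
 * destination GPR (vmovmskpd likewise with two/four sign bits):
 *
 *      static uint32_t sketchMovMskPs(uint32_t const auSrc[4])
 *      {
 *          uint32_t fMask = 0;
 *          for (unsigned i = 0; i < 4; i++)
 *              fMask |= (auSrc[i] >> 31) << i; // element sign bit -> mask bit i
 *          return fMask;                       // stored zero-extended into the GPR
 *      }
 */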
3110
3111
3112/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
3113FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
3114{
3115 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3117 if (IEM_IS_MODRM_REG_MODE(bRm))
3118 {
3119 /*
3120 * Register, register.
3121 */
3122 if (pVCpu->iem.s.uVexLength == 0)
3123 {
3124 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3125 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3126 IEM_MC_LOCAL(uint8_t, u8Dst);
3127 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3128 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3129 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3130 IEM_MC_PREPARE_AVX_USAGE();
3131 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3132 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
3133 pu8Dst, puSrc);
3134 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3135 IEM_MC_ADVANCE_RIP_AND_FINISH();
3136 IEM_MC_END();
3137 }
3138 else
3139 {
3140 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3141 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3142 IEM_MC_LOCAL(uint8_t, u8Dst);
3143 IEM_MC_LOCAL(RTUINT256U, uSrc);
3144 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3145 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3146
3147 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3148 IEM_MC_PREPARE_AVX_USAGE();
3149 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3150 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
3151 pu8Dst, puSrc);
3152 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3153 IEM_MC_ADVANCE_RIP_AND_FINISH();
3154 IEM_MC_END();
3155 }
3156 }
3157 /* No memory operand. */
3158 else
3159 IEMOP_RAISE_INVALID_OPCODE_RET();
3160}
3161
3162
3163/* Opcode VEX.F3.0F 0x50 - invalid */
3164/* Opcode VEX.F2.0F 0x50 - invalid */
3165
3166/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
3167FNIEMOP_DEF(iemOp_vsqrtps_Vps_Wps)
3168{
3169 IEMOP_MNEMONIC2(VEX_RM, VSQRTPS, vsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3170 IEMOPMEDIAF2_INIT_VARS( vsqrtps);
3171 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3172}
3173
3174
3175/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
3176FNIEMOP_DEF(iemOp_vsqrtpd_Vpd_Wpd)
3177{
3178 IEMOP_MNEMONIC2(VEX_RM, VSQRTPD, vsqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3179 IEMOPMEDIAF2_INIT_VARS( vsqrtpd);
3180 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3181}
3182
3183
3184/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
3185FNIEMOP_DEF(iemOp_vsqrtss_Vss_Hss_Wss)
3186{
3187 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSS, vsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3188 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3189 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback));
3190}
3191
3192
3193/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
3194FNIEMOP_DEF(iemOp_vsqrtsd_Vsd_Hsd_Wsd)
3195{
3196 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSD, vsqrtsd, Vps, Hps, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3197 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3198 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback));
3199}
3200
3201
3202/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
3203FNIEMOP_DEF(iemOp_vrsqrtps_Vps_Wps)
3204{
3205 IEMOP_MNEMONIC2(VEX_RM, VRSQRTPS, vrsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3206 IEMOPMEDIAF2_INIT_VARS( vrsqrtps);
3207 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3208}
3209
3210
3211/* Opcode VEX.66.0F 0x52 - invalid */
3212
3213
3214/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
3215FNIEMOP_DEF(iemOp_vrsqrtss_Vss_Hss_Wss)
3216{
3217 IEMOP_MNEMONIC3(VEX_RVM, VRSQRTSS, vrsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3218 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3219 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback));
3220}
3221
3222
3223/* Opcode VEX.F2.0F 0x52 - invalid */
3224
3225
3226/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
3227FNIEMOP_DEF(iemOp_vrcpps_Vps_Wps)
3228{
3229 IEMOP_MNEMONIC2(VEX_RM, VRCPPS, vrcpps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3230 IEMOPMEDIAF2_INIT_VARS( vrcpps);
3231 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3232}
3233
3234
3235/* Opcode VEX.66.0F 0x53 - invalid */
3236
3237
3238/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
3239FNIEMOP_DEF(iemOp_vrcpss_Vss_Hss_Wss)
3240{
3241 IEMOP_MNEMONIC3(VEX_RVM, VRCPSS, vrcpss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3242 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3243 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback));
3244}
3245
3246
3247/* Opcode VEX.F2.0F 0x53 - invalid */
3248
3249
3250/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
3251FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
3252{
3253 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3254 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3255 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3256}
3257
3258
3259/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
3260FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
3261{
3262 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3263 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3264 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3265}
3266
3267
3268/* Opcode VEX.F3.0F 0x54 - invalid */
3269/* Opcode VEX.F2.0F 0x54 - invalid */
3270
3271
3272/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
3273FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
3274{
3275 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3276 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3277 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3278}
3279
3280
3281/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
3282FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
3283{
3284 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3285 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3286 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3287}
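
/*
 * Semantics sketch (illustrative only; the helper is made up): note the
 * operand order of the and-not forms - it is the first source that gets
 * inverted:
 *
 *      static uint64_t sketchAndN(uint64_t uSrc1, uint64_t uSrc2)
 *      {
 *          return ~uSrc1 & uSrc2;      // applied to each 64-bit chunk
 *      }
 */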
3288
3289
3290/* Opcode VEX.F3.0F 0x55 - invalid */
3291/* Opcode VEX.F2.0F 0x55 - invalid */
3292
3293/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
3294FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
3295{
3296 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3297 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3298 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3299}
3300
3301
3302/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
3303FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
3304{
3305 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3306 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3307 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3308}
3309
3310
3311/* Opcode VEX.F3.0F 0x56 - invalid */
3312/* Opcode VEX.F2.0F 0x56 - invalid */
3313
3314
3315/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
3316FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
3317{
3318 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3319 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3320 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3321}
3322
3323
3324/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
3325FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
3326{
3327 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3328 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3329 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3330}
3331
3332
3333/* Opcode VEX.F3.0F 0x57 - invalid */
3334/* Opcode VEX.F2.0F 0x57 - invalid */
3335
3336
3337/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
3338FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
3339{
3340 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3341 IEMOPMEDIAF3_INIT_VARS( vaddps);
3342 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3343}
3344
3345
3346/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
3347FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
3348{
3349 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3350 IEMOPMEDIAF3_INIT_VARS( vaddpd);
3351 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3352}
3353
3354
3355/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
3356FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
3357{
3358 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3359 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3360 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
3361}
3362
3363
3364/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
3365FNIEMOP_DEF(iemOp_vaddsd_Vsd_Hsd_Wsd)
3366{
3367 IEMOP_MNEMONIC3(VEX_RVM, VADDSD, vaddsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3368 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3369 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback));
3370}
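
/* Note: for the VEX-encoded scalar forms (vaddss/vaddsd and friends) only
   the low element is computed; bits [127:32] resp. [127:64] of the result
   are taken from the first source operand (VEX.vvvv) and the destination
   bits [MAXVL-1:128] are zeroed, as done by the iemOpCommonAvx_Vx_Hx_R32/R64
   workers. */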
3371
3372
3373/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
3374FNIEMOP_DEF(iemOp_vmulps_Vps_Hps_Wps)
3375{
3376 IEMOP_MNEMONIC3(VEX_RVM, VMULPS, vmulps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3377 IEMOPMEDIAF3_INIT_VARS( vmulps);
3378 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3379}
3380
3381
3382/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
3383FNIEMOP_DEF(iemOp_vmulpd_Vpd_Hpd_Wpd)
3384{
3385 IEMOP_MNEMONIC3(VEX_RVM, VMULPD, vmulpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3386 IEMOPMEDIAF3_INIT_VARS( vmulpd);
3387 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3388}
3389
3390
3391/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
3392FNIEMOP_DEF(iemOp_vmulss_Vss_Hss_Wss)
3393{
3394 IEMOP_MNEMONIC3(VEX_RVM, VMULSS, vmulss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3395 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3396 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback));
3397}
3398
3399
3400/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
3401FNIEMOP_DEF(iemOp_vmulsd_Vsd_Hsd_Wsd)
3402{
3403 IEMOP_MNEMONIC3(VEX_RVM, VMULSD, vmulsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3404 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3405 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback));
3406}
3407
3408
3409/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
3410FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
3411/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
3412FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
3413/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
3414FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
3415/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
3416FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
3417
3418/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
3419FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
3420/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
3421FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
3422/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
3423FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
3424/* Opcode VEX.F2.0F 0x5b - invalid */
3425
3426
3427/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
3428FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
3429{
3430 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3431 IEMOPMEDIAF3_INIT_VARS( vsubps);
3432 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3433}
3434
3435
3436/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
3437FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
3438{
3439 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3440 IEMOPMEDIAF3_INIT_VARS( vsubpd);
3441 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3442}
3443
3444
3445/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
3446FNIEMOP_DEF(iemOp_vsubss_Vss_Hss_Wss)
3447{
3448 IEMOP_MNEMONIC3(VEX_RVM, VSUBSS, vsubss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3449 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3450 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback));
3451}
3452
3453
3454/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
3455FNIEMOP_DEF(iemOp_vsubsd_Vsd_Hsd_Wsd)
3456{
3457 IEMOP_MNEMONIC3(VEX_RVM, VSUBSD, vsubsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3458 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3459 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback));
3460}
3461
3462
3463/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
3464FNIEMOP_DEF(iemOp_vminps_Vps_Hps_Wps)
3465{
3466 IEMOP_MNEMONIC3(VEX_RVM, VMINPS, vminps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3467 IEMOPMEDIAF3_INIT_VARS( vminps);
3468 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3469}
3470
3471
3472/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
3473FNIEMOP_DEF(iemOp_vminpd_Vpd_Hpd_Wpd)
3474{
3475 IEMOP_MNEMONIC3(VEX_RVM, VMINPD, vminpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3476 IEMOPMEDIAF3_INIT_VARS( vminpd);
3477 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3478}
3479
3480
3481/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
3482FNIEMOP_DEF(iemOp_vminss_Vss_Hss_Wss)
3483{
3484 IEMOP_MNEMONIC3(VEX_RVM, VMINSS, vminss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3485 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3486 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback));
3487}
3488
3489
3490/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
3491FNIEMOP_DEF(iemOp_vminsd_Vsd_Hsd_Wsd)
3492{
3493 IEMOP_MNEMONIC3(VEX_RVM, VMINSD, vminsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3494 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3495 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback));
3496}
3497
3498
3499/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
3500FNIEMOP_DEF(iemOp_vdivps_Vps_Hps_Wps)
3501{
3502 IEMOP_MNEMONIC3(VEX_RVM, VDIVPS, vdivps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3503 IEMOPMEDIAF3_INIT_VARS( vdivps);
3504 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3505}
3506
3507
3508/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
3509FNIEMOP_DEF(iemOp_vdivpd_Vpd_Hpd_Wpd)
3510{
3511 IEMOP_MNEMONIC3(VEX_RVM, VDIVPD, vdivpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3512 IEMOPMEDIAF3_INIT_VARS( vdivpd);
3513 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3514}
3515
3516
3517/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
3518FNIEMOP_DEF(iemOp_vdivss_Vss_Hss_Wss)
3519{
3520 IEMOP_MNEMONIC3(VEX_RVM, VDIVSS, vdivss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3521 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3522 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback));
3523}
3524
3525
3526/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
3527FNIEMOP_DEF(iemOp_vdivsd_Vsd_Hsd_Wsd)
3528{
3529 IEMOP_MNEMONIC3(VEX_RVM, VDIVSD, vdivsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3530 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3531 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback));
3532}
3533
3534
3535/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
3536FNIEMOP_DEF(iemOp_vmaxps_Vps_Hps_Wps)
3537{
3538 IEMOP_MNEMONIC3(VEX_RVM, VMAXPS, vmaxps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3539 IEMOPMEDIAF3_INIT_VARS( vmaxps);
3540 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3541}
3542
3543
3544/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
3545FNIEMOP_DEF(iemOp_vmaxpd_Vpd_Hpd_Wpd)
3546{
3547 IEMOP_MNEMONIC3(VEX_RVM, VMAXPD, vmaxpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3548 IEMOPMEDIAF3_INIT_VARS( vmaxpd);
3549 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3550}
3551
3552
3553/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
3554FNIEMOP_DEF(iemOp_vmaxss_Vss_Hss_Wss)
3555{
3556 IEMOP_MNEMONIC3(VEX_RVM, VMAXSS, vmaxss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3557 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3558 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback));
3559}
3560
3561
3562/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
3563FNIEMOP_DEF(iemOp_vmaxsd_Vsd_Hsd_Wsd)
3564{
3565 IEMOP_MNEMONIC3(VEX_RVM, VMAXSD, vmaxsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3566 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3567 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback));
3568}
3569
3570
3571/* Opcode VEX.0F 0x60 - invalid */
3572
3573
3574/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
3575FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
3576{
3577 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3578 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
3579 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3580}
3581
3582
3583/* Opcode VEX.F3.0F 0x60 - invalid */
3584
3585
3586/* Opcode VEX.0F 0x61 - invalid */
3587
3588
3589/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
3590FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
3591{
3592 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3593 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
3594 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3595}
3596
3597
3598/* Opcode VEX.F3.0F 0x61 - invalid */
3599
3600
3601/* Opcode VEX.0F 0x62 - invalid */
3602
3603/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
3604FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
3605{
3606 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3607 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
3608 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3609}
3610
3611
3612/* Opcode VEX.F3.0F 0x62 - invalid */
3613
3614
3615
3616/* Opcode VEX.0F 0x63 - invalid */
3617
3618
3619/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
3620FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
3621{
3622 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3623 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
3624 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3625}
3626
3627
3628/* Opcode VEX.F3.0F 0x63 - invalid */
3629
3630/* Opcode VEX.0F 0x64 - invalid */
3631
3632
3633/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
3634FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
3635{
3636 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3637 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
3638 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3639}
3640
3641
3642/* Opcode VEX.F3.0F 0x64 - invalid */
3643
3644/* Opcode VEX.0F 0x65 - invalid */
3645
3646
3647/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3648FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3649{
3650 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3651 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
3652 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3653}
3654
3655
3656/* Opcode VEX.F3.0F 0x65 - invalid */
3657
3658/* Opcode VEX.0F 0x66 - invalid */
3659
3660
3661/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3662FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3663{
3664 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3665 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3666 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3667}
3668
3669
3670/* Opcode VEX.F3.0F 0x66 - invalid */
3671
3672/* Opcode VEX.0F 0x67 - invalid */
3673
3674
3675/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3676FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3677{
3678 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3679 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3680 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3681}
3682
3683
3684/* Opcode VEX.F3.0F 0x67 - invalid */
3685
3686
3687///**
3688// * Common worker for SSE2 instructions on the form:
3689// * pxxxx xmm1, xmm2/mem128
3690// *
3691// * The 2nd operand is the second half of a register, which in the memory case
3692// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3693// * where it may read the full 128 bits or only the upper 64 bits.
3694// *
3695// * Exceptions type 4.
3696// */
3697//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3698//{
3699// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3700// if (IEM_IS_MODRM_REG_MODE(bRm))
3701// {
3702// /*
3703// * Register, register.
3704// */
3705// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3706// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3707// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3708// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3709// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3710// IEM_MC_PREPARE_SSE_USAGE();
3711// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3712// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3713// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3714// IEM_MC_ADVANCE_RIP_AND_FINISH();
3715// IEM_MC_END();
3716// }
3717// else
3718// {
3719// /*
3720// * Register, memory.
3721// */
3722// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3723// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3724// IEM_MC_LOCAL(RTUINT128U, uSrc);
3725// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3726// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3727//
3728// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3729// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3730// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3731// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3732//
3733// IEM_MC_PREPARE_SSE_USAGE();
3734// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3735// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3736//
3737// IEM_MC_ADVANCE_RIP_AND_FINISH();
3738// IEM_MC_END();
3739// }
3740// return VINF_SUCCESS;
3741//}
3742
3743
3744/* Opcode VEX.0F 0x68 - invalid */
3745
3746/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3747FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3748{
3749 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3750 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3751 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3752}
3753
3754
3755/* Opcode VEX.F3.0F 0x68 - invalid */
3756
3757
3758/* Opcode VEX.0F 0x69 - invalid */
3759
3760
3761/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3762FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3763{
3764 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3765 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3766 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3767}
3768
3769
3770/* Opcode VEX.F3.0F 0x69 - invalid */
3771
3772
3773/* Opcode VEX.0F 0x6a - invalid */
3774
3775
3776/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3777FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3778{
3779 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3780 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3781 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3782}
3783
3784
3785/* Opcode VEX.F3.0F 0x6a - invalid */
3786
3787
3788/* Opcode VEX.0F 0x6b - invalid */
3789
3790
3791/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3792FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3793{
3794 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3795 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3796 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3797}
3798
3799
3800/* Opcode VEX.F3.0F 0x6b - invalid */
3801
3802
3803/* Opcode VEX.0F 0x6c - invalid */
3804
3805
3806/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3807FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3808{
3809 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3810 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3811 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3812}
3813
3814
3815/* Opcode VEX.F3.0F 0x6c - invalid */
3816/* Opcode VEX.F2.0F 0x6c - invalid */
3817
3818
3819/* Opcode VEX.0F 0x6d - invalid */
3820
3821
3822/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3823FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3824{
3825 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3826 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3827 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3828}
3829
3830
3831/* Opcode VEX.F3.0F 0x6d - invalid */
3832
3833
3834/* Opcode VEX.0F 0x6e - invalid */
3835
3836FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3837{
3838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3839 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3840 {
3841 /**
3842 * @opcode 0x6e
3843 * @opcodesub rex.w=1
3844 * @oppfx 0x66
3845 * @opcpuid avx
3846 * @opgroup og_avx_simdint_datamov
3847 * @opxcpttype 5
3848 * @optest 64-bit / op1=1 op2=2 -> op1=2
3849 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3850 */
3851 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3852 if (IEM_IS_MODRM_REG_MODE(bRm))
3853 {
3854 /* XMM, greg64 */
3855 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3856 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3857 IEM_MC_LOCAL(uint64_t, u64Tmp);
3858
3859 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3860 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3861
3862 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3863 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3864
3865 IEM_MC_ADVANCE_RIP_AND_FINISH();
3866 IEM_MC_END();
3867 }
3868 else
3869 {
3870 /* XMM, [mem64] */
3871 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3873 IEM_MC_LOCAL(uint64_t, u64Tmp);
3874
3875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3876 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3877 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3878 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3879
3880 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3881 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3882
3883 IEM_MC_ADVANCE_RIP_AND_FINISH();
3884 IEM_MC_END();
3885 }
3886 }
3887 else
3888 {
3889 /**
3890 * @opdone
3891 * @opcode 0x6e
3892 * @opcodesub rex.w=0
3893 * @oppfx 0x66
3894 * @opcpuid avx
3895 * @opgroup og_avx_simdint_datamov
3896 * @opxcpttype 5
3897 * @opfunction iemOp_vmovd_q_Vy_Ey
3898 * @optest op1=1 op2=2 -> op1=2
3899 * @optest op1=0 op2=-42 -> op1=-42
3900 */
3901 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3902 if (IEM_IS_MODRM_REG_MODE(bRm))
3903 {
3904 /* XMM, greg32 */
3905 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3906 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3907 IEM_MC_LOCAL(uint32_t, u32Tmp);
3908
3909 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3910 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3911
3912 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3913 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3914
3915 IEM_MC_ADVANCE_RIP_AND_FINISH();
3916 IEM_MC_END();
3917 }
3918 else
3919 {
3920 /* XMM, [mem32] */
3921 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3923 IEM_MC_LOCAL(uint32_t, u32Tmp);
3924
3925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3926 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3927 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3928 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3929
3930 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3931 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3932
3933 IEM_MC_ADVANCE_RIP_AND_FINISH();
3934 IEM_MC_END();
3935 }
3936 }
3937}
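
/* Note: VEX.W selects between the two forms decoded above: W=0 gives the
   dword vmovd, W=1 the qword vmovq form used in 64-bit mode.  Both require
   VEX.L=0 and VEX.vvvv=1111b, enforced by the
   IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX check. */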
3938
3939
3940/* Opcode VEX.F3.0F 0x6e - invalid */
3941
3942
3943/* Opcode VEX.0F 0x6f - invalid */
3944
3945/**
3946 * @opcode 0x6f
3947 * @oppfx 0x66
3948 * @opcpuid avx
3949 * @opgroup og_avx_simdint_datamove
3950 * @opxcpttype 1
3951 * @optest op1=1 op2=2 -> op1=2
3952 * @optest op1=0 op2=-42 -> op1=-42
3953 */
3954FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3955{
3956 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3957 Assert(pVCpu->iem.s.uVexLength <= 1);
3958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3959 if (IEM_IS_MODRM_REG_MODE(bRm))
3960 {
3961 /*
3962 * Register, register.
3963 */
3964 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3965 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3966
3967 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3968 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3969 if (pVCpu->iem.s.uVexLength == 0)
3970 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3971 IEM_GET_MODRM_RM(pVCpu, bRm));
3972 else
3973 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3974 IEM_GET_MODRM_RM(pVCpu, bRm));
3975 IEM_MC_ADVANCE_RIP_AND_FINISH();
3976 IEM_MC_END();
3977 }
3978 else if (pVCpu->iem.s.uVexLength == 0)
3979 {
3980 /*
3981 * Register, memory128.
3982 */
3983 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3984 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3986
3987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3988 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3989 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3990 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3991
3992 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3993 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3994
3995 IEM_MC_ADVANCE_RIP_AND_FINISH();
3996 IEM_MC_END();
3997 }
3998 else
3999 {
4000 /*
4001 * Register, memory256.
4002 */
4003 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4004 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4006
4007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4008 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4009 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4010 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4011
4012 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4013 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4014
4015 IEM_MC_ADVANCE_RIP_AND_FINISH();
4016 IEM_MC_END();
4017 }
4018}
4019
4020/**
4021 * @opcode 0x6f
4022 * @oppfx 0xf3
4023 * @opcpuid avx
4024 * @opgroup og_avx_simdint_datamove
4025 * @opxcpttype 4UA
4026 * @optest op1=1 op2=2 -> op1=2
4027 * @optest op1=0 op2=-42 -> op1=-42
4028 */
4029FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
4030{
4031 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4032 Assert(pVCpu->iem.s.uVexLength <= 1);
4033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4034 if (IEM_IS_MODRM_REG_MODE(bRm))
4035 {
4036 /*
4037 * Register, register.
4038 */
4039 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4040 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4041
4042 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4043 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4044 if (pVCpu->iem.s.uVexLength == 0)
4045 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4046 IEM_GET_MODRM_RM(pVCpu, bRm));
4047 else
4048 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4049 IEM_GET_MODRM_RM(pVCpu, bRm));
4050 IEM_MC_ADVANCE_RIP_AND_FINISH();
4051 IEM_MC_END();
4052 }
4053 else if (pVCpu->iem.s.uVexLength == 0)
4054 {
4055 /*
4056 * Register, memory128.
4057 */
4058 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4059 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4061
4062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4063 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4065 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4066
4067 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4068 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4069
4070 IEM_MC_ADVANCE_RIP_AND_FINISH();
4071 IEM_MC_END();
4072 }
4073 else
4074 {
4075 /*
4076 * Register, memory256.
4077 */
4078 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4079 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4081
4082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4083 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4084 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4085 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4086
4087 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4088 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4089
4090 IEM_MC_ADVANCE_RIP_AND_FINISH();
4091 IEM_MC_END();
4092 }
4093}
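
/* Note: the only difference between the vmovdqa and vmovdqu loads above is
   the memory fetch: the _ALIGN_SSE/_ALIGN_AVX variants fault on a misaligned
   16/32-byte operand, while the _NO_AC variants accept any alignment. */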
4094
4095
4096/* Opcode VEX.0F 0x70 - invalid */
4097
4098
4099/**
4100 * Common worker for AVX/AVX2 instructions on the forms:
4101 * - vpxxx xmm0, xmm2/mem128, imm8
4102 * - vpxxx ymm0, ymm2/mem256, imm8
4103 *
4104 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4105 */
4106FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4107{
4108 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4109 if (IEM_IS_MODRM_REG_MODE(bRm))
4110 {
4111 /*
4112 * Register, register.
4113 */
4114 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4115 if (pVCpu->iem.s.uVexLength)
4116 {
4117 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4118 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4119 IEM_MC_LOCAL(RTUINT256U, uDst);
4120 IEM_MC_LOCAL(RTUINT256U, uSrc);
4121 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4122 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4123 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4124 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4125 IEM_MC_PREPARE_AVX_USAGE();
4126 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4127 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4128 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4129 IEM_MC_ADVANCE_RIP_AND_FINISH();
4130 IEM_MC_END();
4131 }
4132 else
4133 {
4134 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4135 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4136 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4137 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4138 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4139 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4140 IEM_MC_PREPARE_AVX_USAGE();
4141 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4142 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4143 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4144 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4145 IEM_MC_ADVANCE_RIP_AND_FINISH();
4146 IEM_MC_END();
4147 }
4148 }
4149 else
4150 {
4151 /*
4152 * Register, memory.
4153 */
4154 if (pVCpu->iem.s.uVexLength)
4155 {
4156 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4157 IEM_MC_LOCAL(RTUINT256U, uDst);
4158 IEM_MC_LOCAL(RTUINT256U, uSrc);
4159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4160 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4161 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4162
4163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4164 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4165 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4166 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4167 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4168 IEM_MC_PREPARE_AVX_USAGE();
4169
4170 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4171 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4172 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4173
4174 IEM_MC_ADVANCE_RIP_AND_FINISH();
4175 IEM_MC_END();
4176 }
4177 else
4178 {
4179 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4180 IEM_MC_LOCAL(RTUINT128U, uSrc);
4181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4182 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4183 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4184
4185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4186 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4187 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4188 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4189 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4190 IEM_MC_PREPARE_AVX_USAGE();
4191
4192 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4193 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4194 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4195 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4196
4197 IEM_MC_ADVANCE_RIP_AND_FINISH();
4198 IEM_MC_END();
4199 }
4200 }
4201}
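
/* Note: in the memory paths above the effective address is calculated before
   the imm8 is fetched, since the immediate is the last byte of the
   instruction, following the ModR/M, SIB and displacement bytes.  The
   trailing 1 passed to IEM_MC_CALC_RM_EFF_ADDR tells the address calculation
   that one immediate byte is still outstanding, which matters for
   RIP-relative addressing. */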
4202
4203
4204/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
4205FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
4206{
4207 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4208 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
4209 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
4210
4211}
4212
4213
4214/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
4215FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
4216{
4217 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4218 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
4219 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
4220
4221}
4222
4223
4224/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
4225FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
4226{
4227 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4228 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
4229 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
4230}
4231
4232
4233/**
4234 * Common worker(s) for AVX/AVX2 instructions on the forms:
4235 * - vpxxx xmm0, xmm2, imm8
4236 * - vpxxx ymm0, ymm2, imm8
4237 *
4238 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4239 */
4240FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
4241{
4242 if (IEM_IS_MODRM_REG_MODE(bRm))
4243 {
4244 /*
4245 * Register, register.
4246 */
4247 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4249 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
4250 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4251 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4252 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4253 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4254 IEM_MC_PREPARE_AVX_USAGE();
4255 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4256 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4257 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4258 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
4259 IEM_MC_ADVANCE_RIP_AND_FINISH();
4260 IEM_MC_END();
4261 }
4262 /* No memory operand. */
4263 else
4264 IEMOP_RAISE_INVALID_OPCODE_RET();
4265}
4266
4267FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4268{
4269 if (IEM_IS_MODRM_REG_MODE(bRm))
4270 {
4271 /*
4272 * Register, register.
4273 */
4274 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4275 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4276 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
4277 IEM_MC_LOCAL(RTUINT256U, uDst);
4278 IEM_MC_LOCAL(RTUINT256U, uSrc);
4279 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4280 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4281 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4282 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4283 IEM_MC_PREPARE_AVX_USAGE();
4284 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4285 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4286 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
4287 IEM_MC_ADVANCE_RIP_AND_FINISH();
4288 IEM_MC_END();
4289 }
4290 /* No memory operand. */
4291 else
4292 IEMOP_RAISE_INVALID_OPCODE_RET();
4293}
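
/* Note: the two workers above back the VEX_VMI_REG shift-by-immediate
   encodings of groups 12/13/14 below: VEX.vvvv names the destination and
   ModR/M.rm the source, and only the register form (mod=11b) is valid, so
   the memory case raises #UD. */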
4294
4295
4296/* Opcode VEX.0F 0x71 11/2 - invalid. */
4297/** Opcode VEX.66.0F 0x71 11/2. */
4298FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
4299{
4300 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4301 if (pVCpu->iem.s.uVexLength)
4302 {
4303 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4304 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
4305 }
4306 else
4307 {
4308 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4309 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
4310 }
4311}
4312
4313
4314/* Opcode VEX.0F 0x71 11/4 - invalid */
4315/** Opcode VEX.66.0F 0x71 11/4. */
4316FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
4317{
4318 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4319 if (pVCpu->iem.s.uVexLength)
4320 {
4321 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4322 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
4323 }
4324 else
4325 {
4326 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4327 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
4328 }
4329}
4330
4331/* Opcode VEX.0F 0x71 11/6 - invalid */
4332
4333/** Opcode VEX.66.0F 0x71 11/6. */
4334FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
4335{
4336 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4337 if (pVCpu->iem.s.uVexLength)
4338 {
4339 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4340 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
4341 }
4342 else
4343 {
4344 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4345 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
4346 }
4347}
4348
4349
4350/**
4351 * VEX Group 12 jump table for register variant.
4352 */
4353IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
4354{
4355 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4356 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4357 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4358 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4359 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4360 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4361 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4362 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4363};
4364AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
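
/* The table is indexed by ModR/M.reg * 4 + idxPrefix, where idxPrefix is 0
   for no SIMD prefix, 1 for 0x66, 2 for 0xF3 and 3 for 0xF2; for this group
   only the 0x66 column of /2, /4 and /6 is populated. */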
4365
4366
4367/** Opcode VEX.0F 0x71. */
4368FNIEMOP_DEF(iemOp_VGrp12)
4369{
4370 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4371 if (IEM_IS_MODRM_REG_MODE(bRm))
4372 /* register, register */
4373 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4374 + pVCpu->iem.s.idxPrefix], bRm);
4375 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4376}
4377
4378
4379/* Opcode VEX.0F 0x72 11/2 - invalid. */
4380/** Opcode VEX.66.0F 0x72 11/2. */
4381FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
4382{
4383 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4384 if (pVCpu->iem.s.uVexLength)
4385 {
4386 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4387 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
4388 }
4389 else
4390 {
4391 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4392 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
4393 }
4394}
4395
4396
4397/* Opcode VEX.0F 0x72 11/4 - invalid. */
4398/** Opcode VEX.66.0F 0x72 11/4. */
4399FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
4400{
4401 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4402 if (pVCpu->iem.s.uVexLength)
4403 {
4404 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4405 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
4406 }
4407 else
4408 {
4409 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4410 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
4411 }
4412}
4413
4414/* Opcode VEX.0F 0x72 11/6 - invalid. */
4415
4416/** Opcode VEX.66.0F 0x72 11/6. */
4417FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
4418{
4419 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4420 if (pVCpu->iem.s.uVexLength)
4421 {
4422 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4423 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
4424 }
4425 else
4426 {
4427 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4428 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
4429 }
4430}
4431
4432
4433/**
4434 * Group 13 jump table for register variant.
4435 */
4436IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
4437{
4438 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4439 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4440 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4441 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4442 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4443 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4444 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4445 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4446};
4447AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
4448
4449/** Opcode VEX.0F 0x72. */
4450FNIEMOP_DEF(iemOp_VGrp13)
4451{
4452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4453 if (IEM_IS_MODRM_REG_MODE(bRm))
4454 /* register, register */
4455 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4456 + pVCpu->iem.s.idxPrefix], bRm);
4457 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4458}
4459
4460
4461/* Opcode VEX.0F 0x73 11/2 - invalid. */
4462/** Opcode VEX.66.0F 0x73 11/2. */
4463FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
4464{
4465 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4466 if (pVCpu->iem.s.uVexLength)
4467 {
4468 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4469 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
4470 }
4471 else
4472 {
4473 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4474 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
4475 }
4476}
4477
4478
4479/** Opcode VEX.66.0F 0x73 11/3. */
4480FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
4481{
4482 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4483 if (pVCpu->iem.s.uVexLength)
4484 {
4485 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4486 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
4487 }
4488 else
4489 {
4490 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4491 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
4492 }
4493}
4494
4495/* Opcode VEX.0F 0x73 11/6 - invalid. */
4496
4497/** Opcode VEX.66.0F 0x73 11/6. */
4498FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
4499{
4500 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4501 if (pVCpu->iem.s.uVexLength)
4502 {
4503 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4504 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
4505 }
4506 else
4507 {
4508 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4509 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
4510 }
4511}
4512
4513/** Opcode VEX.66.0F 0x73 11/7. */
4514FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
4515{
4516 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4517 if (pVCpu->iem.s.uVexLength)
4518 {
4519 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4520 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
4521 }
4522 else
4523 {
4524 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4525 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
4526 }
4527}
4528
4529/* Opcode VEX.0F 0x73 11/7 - invalid. */
4530
4531/**
4532 * Group 14 jump table for register variant.
4533 */
4534IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
4535{
4536 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4537 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4538 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4539 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4540 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4541 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4542 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4543 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4544};
4545AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
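
/* Unlike groups 12 and 13, group 14 also populates the /3 (vpsrldq) and /7
   (vpslldq) rows, i.e. the whole-byte shifts that only exist with the 0x66
   prefix. */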
4546
4547
4548/** Opcode VEX.0F 0x73. */
4549FNIEMOP_DEF(iemOp_VGrp14)
4550{
4551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4552 if (IEM_IS_MODRM_REG_MODE(bRm))
4553 /* register, register */
4554 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4555 + pVCpu->iem.s.idxPrefix], bRm);
4556 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4557}
4558
4559
4560/* Opcode VEX.0F 0x74 - invalid */
4561
4562
4563/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
4564FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
4565{
4566 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4567 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
4568 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4569}
4570
4571/* Opcode VEX.F3.0F 0x74 - invalid */
4572/* Opcode VEX.F2.0F 0x74 - invalid */
4573
4574
4575/* Opcode VEX.0F 0x75 - invalid */
4576
4577
4578/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
4579FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
4580{
4581 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4582 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
4583 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4584}
4585
4586
4587/* Opcode VEX.F3.0F 0x75 - invalid */
4588/* Opcode VEX.F2.0F 0x75 - invalid */
4589
4590
4591/* Opcode VEX.0F 0x76 - invalid */
4592
4593
4594/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
4595FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
4596{
4597 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4598 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
4599 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4600}
4601
4602
4603/* Opcode VEX.F3.0F 0x76 - invalid */
4604/* Opcode VEX.F2.0F 0x76 - invalid */
4605
4606
4607/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
4608FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
4609{
4610 Assert(pVCpu->iem.s.uVexLength <= 1);
4611 if (pVCpu->iem.s.uVexLength == 0)
4612 {
4613 /*
4614 * 128-bit: vzeroupper
4615 */
4616 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
4617 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4618
4619 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4620 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4621 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4622
4623 IEM_MC_CLEAR_YREG_128_UP(0);
4624 IEM_MC_CLEAR_YREG_128_UP(1);
4625 IEM_MC_CLEAR_YREG_128_UP(2);
4626 IEM_MC_CLEAR_YREG_128_UP(3);
4627 IEM_MC_CLEAR_YREG_128_UP(4);
4628 IEM_MC_CLEAR_YREG_128_UP(5);
4629 IEM_MC_CLEAR_YREG_128_UP(6);
4630 IEM_MC_CLEAR_YREG_128_UP(7);
4631
4632 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4633 {
4634 IEM_MC_CLEAR_YREG_128_UP( 8);
4635 IEM_MC_CLEAR_YREG_128_UP( 9);
4636 IEM_MC_CLEAR_YREG_128_UP(10);
4637 IEM_MC_CLEAR_YREG_128_UP(11);
4638 IEM_MC_CLEAR_YREG_128_UP(12);
4639 IEM_MC_CLEAR_YREG_128_UP(13);
4640 IEM_MC_CLEAR_YREG_128_UP(14);
4641 IEM_MC_CLEAR_YREG_128_UP(15);
4642 }
4643
4644 IEM_MC_ADVANCE_RIP_AND_FINISH();
4645 IEM_MC_END();
4646 }
4647 else
4648 {
4649 /*
4650 * 256-bit: vzeroall
4651 */
4652 IEMOP_MNEMONIC(vzeroall, "vzeroall");
4653 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4654
4655 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4656 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4657 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4658
4659 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
4660 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
4661 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
4662 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
4663 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
4664 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4665 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4666 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4667 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4668
4669 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4670 {
4671 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4672 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
4673 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
4674 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
4675 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
4676 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
4677 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
4678 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
4679 }
4680
4681 IEM_MC_ADVANCE_RIP_AND_FINISH();
4682 IEM_MC_END();
4683 }
4684}
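
/* Note: XMM/YMM8-15 only exist in 64-bit mode, which is why the upper eight
   registers are only cleared conditionally above. */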
4685
4686
4687/* Opcode VEX.66.0F 0x77 - invalid */
4688/* Opcode VEX.F3.0F 0x77 - invalid */
4689/* Opcode VEX.F2.0F 0x77 - invalid */
4690
4691/* Opcode VEX.0F 0x78 - invalid */
4692/* Opcode VEX.66.0F 0x78 - invalid */
4693/* Opcode VEX.F3.0F 0x78 - invalid */
4694/* Opcode VEX.F2.0F 0x78 - invalid */
4695
4696/* Opcode VEX.0F 0x79 - invalid */
4697/* Opcode VEX.66.0F 0x79 - invalid */
4698/* Opcode VEX.F3.0F 0x79 - invalid */
4699/* Opcode VEX.F2.0F 0x79 - invalid */
4700
4701/* Opcode VEX.0F 0x7a - invalid */
4702/* Opcode VEX.66.0F 0x7a - invalid */
4703/* Opcode VEX.F3.0F 0x7a - invalid */
4704/* Opcode VEX.F2.0F 0x7a - invalid */
4705
4706/* Opcode VEX.0F 0x7b - invalid */
4707/* Opcode VEX.66.0F 0x7b - invalid */
4708/* Opcode VEX.F3.0F 0x7b - invalid */
4709/* Opcode VEX.F2.0F 0x7b - invalid */
4710
4711/* Opcode VEX.0F 0x7c - invalid */
4712
4713
4714/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4715FNIEMOP_DEF(iemOp_vhaddpd_Vpd_Hpd_Wpd)
4716{
4717 IEMOP_MNEMONIC3(VEX_RVM, VHADDPD, vhaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4718 IEMOPMEDIAF3_INIT_VARS( vhaddpd);
4719 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4720}
4721
4722
4723/* Opcode VEX.F3.0F 0x7c - invalid */
4724
4725
4726/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4727FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
4728{
4729 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4730 IEMOPMEDIAF3_INIT_VARS( vhaddps);
4731 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4732}
4733
4734
4735/* Opcode VEX.0F 0x7d - invalid */
4736
4737
4738/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4739FNIEMOP_DEF(iemOp_vhsubpd_Vpd_Hpd_Wpd)
4740{
4741 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPD, vhsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4742 IEMOPMEDIAF3_INIT_VARS( vhsubpd);
4743 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4744}
4745
4746
4747/* Opcode VEX.F3.0F 0x7d - invalid */
4748
4749
4750/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4751FNIEMOP_DEF(iemOp_vhsubps_Vps_Hps_Wps)
4752{
4753 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPS, vhsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4754 IEMOPMEDIAF3_INIT_VARS( vhsubps);
4755 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4756}
4757
4758
4759/* Opcode VEX.0F 0x7e - invalid */
4760
4761FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4762{
4763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4764 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4765 {
4766 /**
4767 * @opcode 0x7e
4768 * @opcodesub rex.w=1
4769 * @oppfx 0x66
4770 * @opcpuid avx
4771 * @opgroup og_avx_simdint_datamov
4772 * @opxcpttype 5
4773 * @optest 64-bit / op1=1 op2=2 -> op1=2
4774 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4775 */
4776 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4777 if (IEM_IS_MODRM_REG_MODE(bRm))
4778 {
4779 /* greg64, XMM */
4780 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4781 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4782 IEM_MC_LOCAL(uint64_t, u64Tmp);
4783
4784 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4785 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4786
4787 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4788 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4789
4790 IEM_MC_ADVANCE_RIP_AND_FINISH();
4791 IEM_MC_END();
4792 }
4793 else
4794 {
4795 /* [mem64], XMM */
4796 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4798 IEM_MC_LOCAL(uint64_t, u64Tmp);
4799
4800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4801 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4802 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4803 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4804
4805 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4806 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4807
4808 IEM_MC_ADVANCE_RIP_AND_FINISH();
4809 IEM_MC_END();
4810 }
4811 }
4812 else
4813 {
4814 /**
4815 * @opdone
4816 * @opcode 0x7e
4817 * @opcodesub rex.w=0
4818 * @oppfx 0x66
4819 * @opcpuid avx
4820 * @opgroup og_avx_simdint_datamov
4821 * @opxcpttype 5
4822 * @opfunction iemOp_vmovd_q_Ey_Vy
4823 * @optest op1=1 op2=2 -> op1=2
4824 * @optest op1=0 op2=-42 -> op1=-42
4825 */
4826 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4827 if (IEM_IS_MODRM_REG_MODE(bRm))
4828 {
4829 /* greg32, XMM */
4830 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4831 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4832 IEM_MC_LOCAL(uint32_t, u32Tmp);
4833
4834 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4835 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4836
4837 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4838 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4839
4840 IEM_MC_ADVANCE_RIP_AND_FINISH();
4841 IEM_MC_END();
4842 }
4843 else
4844 {
4845 /* [mem32], XMM */
4846 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4848 IEM_MC_LOCAL(uint32_t, u32Tmp);
4849
4850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4851 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4852 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4853 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4854
4855 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4856 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4857
4858 IEM_MC_ADVANCE_RIP_AND_FINISH();
4859 IEM_MC_END();
4860 }
4861 }
4862}
4863
4864
4865/**
4866 * @opcode 0x7e
4867 * @oppfx 0xf3
4868 * @opcpuid avx
4869 * @opgroup og_avx_pcksclr_datamove
4870 * @opxcpttype none
4871 * @optest op1=1 op2=2 -> op1=2
4872 * @optest op1=0 op2=-42 -> op1=-42
4873 */
4874FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4875{
4876 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4878 if (IEM_IS_MODRM_REG_MODE(bRm))
4879 {
4880 /*
4881 * Register, register.
4882 */
4883 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4884 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4885
4886 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4887 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4888
4889 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4890 IEM_GET_MODRM_RM(pVCpu, bRm));
4891 IEM_MC_ADVANCE_RIP_AND_FINISH();
4892 IEM_MC_END();
4893 }
4894 else
4895 {
4896 /*
4897 * Memory, register.
4898 */
4899 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4900 IEM_MC_LOCAL(uint64_t, uSrc);
4901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4902
4903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4904 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4905 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4906 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4907
4908 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4909 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4910
4911 IEM_MC_ADVANCE_RIP_AND_FINISH();
4912 IEM_MC_END();
4913 }
4914}
4915
4916/* Opcode VEX.F2.0F 0x7e - invalid */
4917
4918
4919/* Opcode VEX.0F 0x7f - invalid */
4920
4921/**
4922 * @opcode 0x7f
4923 * @oppfx 0x66
4924 * @opcpuid avx
4925 * @opgroup og_avx_simdint_datamove
4926 * @opxcpttype 1
4927 * @optest op1=1 op2=2 -> op1=2
4928 * @optest op1=0 op2=-42 -> op1=-42
4929 */
4930FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4931{
4932 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4933 Assert(pVCpu->iem.s.uVexLength <= 1);
4934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4935 if (IEM_IS_MODRM_REG_MODE(bRm))
4936 {
4937 /*
4938 * Register, register.
4939 */
4940 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4941 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4942
4943 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4944 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4945 if (pVCpu->iem.s.uVexLength == 0)
4946 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4947 IEM_GET_MODRM_REG(pVCpu, bRm));
4948 else
4949 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4950 IEM_GET_MODRM_REG(pVCpu, bRm));
4951 IEM_MC_ADVANCE_RIP_AND_FINISH();
4952 IEM_MC_END();
4953 }
4954 else if (pVCpu->iem.s.uVexLength == 0)
4955 {
4956 /*
4957 * Register, memory128.
4958 */
4959 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4960 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4962
4963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4964 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4965 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4966 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4967
4968 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4969 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4970
4971 IEM_MC_ADVANCE_RIP_AND_FINISH();
4972 IEM_MC_END();
4973 }
4974 else
4975 {
4976 /*
4977 * Register, memory256.
4978 */
4979 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4980 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4982
4983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4984 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4985 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4986 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4987
4988 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4989 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4990
4991 IEM_MC_ADVANCE_RIP_AND_FINISH();
4992 IEM_MC_END();
4993 }
4994}
4995
4996
4997/**
4998 * @opcode 0x7f
4999 * @oppfx 0xf3
5000 * @opcpuid avx
5001 * @opgroup og_avx_simdint_datamove
5002 * @opxcpttype 4UA
5003 * @optest op1=1 op2=2 -> op1=2
5004 * @optest op1=0 op2=-42 -> op1=-42
5005 */
5006FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
5007{
5008 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5009 Assert(pVCpu->iem.s.uVexLength <= 1);
5010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5011 if (IEM_IS_MODRM_REG_MODE(bRm))
5012 {
5013 /*
5014 * Register, register.
5015 */
5016 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5017 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5018
5019 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5020 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5021 if (pVCpu->iem.s.uVexLength == 0)
5022 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5023 IEM_GET_MODRM_REG(pVCpu, bRm));
5024 else
5025 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5026 IEM_GET_MODRM_REG(pVCpu, bRm));
5027 IEM_MC_ADVANCE_RIP_AND_FINISH();
5028 IEM_MC_END();
5029 }
5030 else if (pVCpu->iem.s.uVexLength == 0)
5031 {
5032 /*
5033 * Register, memory128.
5034 */
5035 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5036 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5038
5039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5040 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5041 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5042 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5043
5044 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5045 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5046
5047 IEM_MC_ADVANCE_RIP_AND_FINISH();
5048 IEM_MC_END();
5049 }
5050 else
5051 {
5052 /*
5053 * Register, memory256.
5054 */
5055 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5056 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5058
5059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5060 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5061 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5062 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5063
5064 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5065 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5066
5067 IEM_MC_ADVANCE_RIP_AND_FINISH();
5068 IEM_MC_END();
5069 }
5070}
5071
5072/* Opcode VEX.F2.0F 0x7f - invalid */
5073
5074
5075/* Opcode VEX.0F 0x80 - invalid */
5076/* Opcode VEX.0F 0x81 - invalid */
5077/* Opcode VEX.0F 0x82 - invalid */
5078/* Opcode VEX.0F 0x83 - invalid */
5079/* Opcode VEX.0F 0x84 - invalid */
5080/* Opcode VEX.0F 0x85 - invalid */
5081/* Opcode VEX.0F 0x86 - invalid */
5082/* Opcode VEX.0F 0x87 - invalid */
5083/* Opcode VEX.0F 0x88 - invalid */
5084/* Opcode VEX.0F 0x89 - invalid */
5085/* Opcode VEX.0F 0x8a - invalid */
5086/* Opcode VEX.0F 0x8b - invalid */
5087/* Opcode VEX.0F 0x8c - invalid */
5088/* Opcode VEX.0F 0x8d - invalid */
5089/* Opcode VEX.0F 0x8e - invalid */
5090/* Opcode VEX.0F 0x8f - invalid */
5091/* Opcode VEX.0F 0x90 - invalid */
5092/* Opcode VEX.0F 0x91 - invalid */
5093/* Opcode VEX.0F 0x92 - invalid */
5094/* Opcode VEX.0F 0x93 - invalid */
5095/* Opcode VEX.0F 0x94 - invalid */
5096/* Opcode VEX.0F 0x95 - invalid */
5097/* Opcode VEX.0F 0x96 - invalid */
5098/* Opcode VEX.0F 0x97 - invalid */
5099/* Opcode VEX.0F 0x98 - invalid */
5100/* Opcode VEX.0F 0x99 - invalid */
5101/* Opcode VEX.0F 0x9a - invalid */
5102/* Opcode VEX.0F 0x9b - invalid */
5103/* Opcode VEX.0F 0x9c - invalid */
5104/* Opcode VEX.0F 0x9d - invalid */
5105/* Opcode VEX.0F 0x9e - invalid */
5106/* Opcode VEX.0F 0x9f - invalid */
5107/* Opcode VEX.0F 0xa0 - invalid */
5108/* Opcode VEX.0F 0xa1 - invalid */
5109/* Opcode VEX.0F 0xa2 - invalid */
5110/* Opcode VEX.0F 0xa3 - invalid */
5111/* Opcode VEX.0F 0xa4 - invalid */
5112/* Opcode VEX.0F 0xa5 - invalid */
5113/* Opcode VEX.0F 0xa6 - invalid */
5114/* Opcode VEX.0F 0xa7 - invalid */
5115/* Opcode VEX.0F 0xa8 - invalid */
5116/* Opcode VEX.0F 0xa9 - invalid */
5117/* Opcode VEX.0F 0xaa - invalid */
5118/* Opcode VEX.0F 0xab - invalid */
5119/* Opcode VEX.0F 0xac - invalid */
5120/* Opcode VEX.0F 0xad - invalid */
5121
5122
5123/* Opcode VEX.0F 0xae mem/0 - invalid. */
5124/* Opcode VEX.0F 0xae mem/1 - invalid. */
5125
5126/**
5127 * @ opmaps grp15
5128 * @ opcode !11/2
5129 * @ oppfx none
5130 * @ opcpuid sse
5131 * @ opgroup og_sse_mxcsrsm
5132 * @ opxcpttype 5
5133 * @ optest op1=0 -> mxcsr=0
5134 * @ optest op1=0x2083 -> mxcsr=0x2083
5135 * @ optest op1=0xfffffffe -> value.xcpt=0xd
5136 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5137 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5138 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5139 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5140 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5141 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5142 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5143 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5144 */
5145FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
5146//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
5147//{
5148// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5149// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5150// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5151// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5152// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5153// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5154// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5155// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5156// IEM_MC_END();
5157// return VINF_SUCCESS;
5158//}
5159
5160
5161/**
5162 * @opmaps vexgrp15
5163 * @opcode !11/3
5164 * @oppfx none
5165 * @opcpuid avx
5166 * @opgroup og_avx_mxcsrsm
5167 * @opxcpttype 5
5168 * @optest mxcsr=0 -> op1=0
5169 * @optest mxcsr=0x2083 -> op1=0x2083
5170 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5171 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
5172 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5173 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5174 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
5175 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
5176 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
5177 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
5178 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5179 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
5180 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5181 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
5182 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5183 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
5184 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5185 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
5186 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
5187 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
5188 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
5189 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
5190 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
5191 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
5192 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
5193 * -> value.xcpt=0x6
5194 * @remarks The AMD Jaguar CPU (f0x16,m0,s1) raises \#UD when CR0.EM is set. It also
5195 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
5196 * APMv4 rev 3.17 page 509.
5197 * @todo Test this instruction on AMD Ryzen.
5198 */
5199FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
5200{
5201 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5202 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5203 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5205 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5206 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5207 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
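 /* Defer to the C implementation, which stores MXCSR to the memory operand. */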
5208 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
5209 IEM_MC_END();
5210}
5211
5212/* Opcode VEX.0F 0xae mem/4 - invalid. */
5213/* Opcode VEX.0F 0xae mem/5 - invalid. */
5214/* Opcode VEX.0F 0xae mem/6 - invalid. */
5215/* Opcode VEX.0F 0xae mem/7 - invalid. */
5216
5217/* Opcode VEX.0F 0xae 11b/0 - invalid. */
5218/* Opcode VEX.0F 0xae 11b/1 - invalid. */
5219/* Opcode VEX.0F 0xae 11b/2 - invalid. */
5220/* Opcode VEX.0F 0xae 11b/3 - invalid. */
5221/* Opcode VEX.0F 0xae 11b/4 - invalid. */
5222/* Opcode VEX.0F 0xae 11b/5 - invalid. */
5223/* Opcode VEX.0F 0xae 11b/6 - invalid. */
5224/* Opcode VEX.0F 0xae 11b/7 - invalid. */
5225
5226/**
5227 * Vex group 15 jump table for memory variant.
5228 */
5229IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
5230{ /* pfx: none, 066h, 0f3h, 0f2h */
5231 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5232 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5233 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5234 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5235 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5236 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5237 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5238 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5239};
5240AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
5241
5242
5243/** Opcode VEX.0F 0xae. */
5244FNIEMOP_DEF(iemOp_VGrp15)
5245{
5246 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5247 if (IEM_IS_MODRM_REG_MODE(bRm))
5248 /* register, register */
5249 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
5250
5251 /* memory, register */
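 /* The table is indexed by ModRM.reg (0..7) times four plus the SIMD prefix
 column (none, 066h, 0f3h, 0f2h = 0..3), matching the layout comment above. */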
5252 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5253 + pVCpu->iem.s.idxPrefix], bRm);
5254}
5255
5256
5257/* Opcode VEX.0F 0xaf - invalid. */
5258
5259/* Opcode VEX.0F 0xb0 - invalid. */
5260/* Opcode VEX.0F 0xb1 - invalid. */
5261/* Opcode VEX.0F 0xb2 - invalid */
5263/* Opcode VEX.0F 0xb3 - invalid. */
5264/* Opcode VEX.0F 0xb4 - invalid. */
5265/* Opcode VEX.0F 0xb5 - invalid. */
5266/* Opcode VEX.0F 0xb6 - invalid. */
5267/* Opcode VEX.0F 0xb7 - invalid. */
5268/* Opcode VEX.0F 0xb8 - invalid. */
5269/* Opcode VEX.0F 0xb9 - invalid. */
5270/* Opcode VEX.0F 0xba - invalid. */
5271/* Opcode VEX.0F 0xbb - invalid. */
5272/* Opcode VEX.0F 0xbc - invalid. */
5273/* Opcode VEX.0F 0xbd - invalid. */
5274/* Opcode VEX.0F 0xbe - invalid. */
5275/* Opcode VEX.0F 0xbf - invalid. */
5276
5277/* Opcode VEX.0F 0xc0 - invalid. */
5278/* Opcode VEX.66.0F 0xc0 - invalid. */
5279/* Opcode VEX.F3.0F 0xc0 - invalid. */
5280/* Opcode VEX.F2.0F 0xc0 - invalid. */
5281
5282/* Opcode VEX.0F 0xc1 - invalid. */
5283/* Opcode VEX.66.0F 0xc1 - invalid. */
5284/* Opcode VEX.F3.0F 0xc1 - invalid. */
5285/* Opcode VEX.F2.0F 0xc1 - invalid. */
5286
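/**
 * Common body for vcmpps/vcmppd: fetches ModRM and the trailing imm8 that
 * selects one of the 32 VEX comparison predicates (0 = EQ_OQ, 1 = LT_OS, ...,
 * 0x1f = TRUE_US), then calls the 128-bit or 256-bit worker for the register
 * or memory source form.
 */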
5287#define IEMOP_VCMPP_BODY(a_Instr) \
5288 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5289 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5290 { \
5291 /* \
5292 * Register, Register. \
5293 */ \
5294 if (pVCpu->iem.s.uVexLength) \
5295 { \
5296 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5297 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5298 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5299 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5300 IEM_MC_PREPARE_AVX_USAGE(); \
5301 IEM_MC_LOCAL(X86YMMREG, uDst); \
5302 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5303 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5304 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5305 IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5306 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5307 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5308 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5309 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5310 puDst, puSrc, bImmArg); \
5311 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5312 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5313 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5314 IEM_MC_END(); \
5315 } \
5316 else \
5317 { \
5318 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5319 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5320 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5321 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5322 IEM_MC_PREPARE_AVX_USAGE(); \
5323 IEM_MC_LOCAL(X86XMMREG, uDst); \
5324 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5325 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5326 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5327 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5328 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5329 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5330 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5331 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5332 puDst, puSrc, bImmArg); \
5333 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5334 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5335 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5336 IEM_MC_END(); \
5337 } \
5338 } \
5339 else \
5340 { \
5341 /* \
5342 * Register, Memory. \
5343 */ \
5344 if (pVCpu->iem.s.uVexLength) \
5345 { \
5346 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5349 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5350 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5352 IEM_MC_PREPARE_AVX_USAGE(); \
5353 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5354 IEM_MC_LOCAL(X86YMMREG, uDst); \
5355 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5356 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5357 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5358 IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5359 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5360 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5361 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5362 puDst, puSrc, bImmArg); \
5363 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5364 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5365 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5366 IEM_MC_END(); \
5367 } \
5368 else \
5369 { \
5370 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5373 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5374 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5375 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5376 IEM_MC_PREPARE_AVX_USAGE(); \
5377 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5378 IEM_MC_LOCAL(X86XMMREG, uDst); \
5379 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5380 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5381 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5382 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5383 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5384 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5385 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5386 puDst, puSrc, bImmArg); \
5387 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5388 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5389 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5390 IEM_MC_END(); \
5391 } \
5392 } \
5393 (void)0
5394
5395
5396/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
5397FNIEMOP_DEF(iemOp_vcmpps_Vps_Hps_Wps_Ib)
5398{
5399 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPS, vcmpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5400 IEMOP_VCMPP_BODY(vcmpps);
5401}
5402
5403
5404/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
5405FNIEMOP_DEF(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib)
5406{
5407 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPD, vcmppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5408 IEMOP_VCMPP_BODY(vcmppd);
5409}
5410
5411
5412/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
5413FNIEMOP_DEF(iemOp_vcmpss_Vss_Hss_Wss_Ib)
5414{
5415 IEMOP_MNEMONIC4(VEX_RVMI, VCMPSS, vcmpss, Vss, Hss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5416
5417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5418 if (IEM_IS_MODRM_REG_MODE(bRm))
5419 {
5420 /*
5421 * XMM32, XMM32.
5422 */
5423 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5424 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5425 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5426 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5427 IEM_MC_PREPARE_AVX_USAGE();
5428 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5429 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5430 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
5431 IEM_MC_LOCAL(X86XMMREG, uDst);
5432 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5433 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5434 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5435 puDst, puSrc, bImmArg);
5436 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5437 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5438
5439 IEM_MC_ADVANCE_RIP_AND_FINISH();
5440 IEM_MC_END();
5441 }
5442 else
5443 {
5444 /*
5445 * XMM32, [mem32].
5446 */
5447 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5448
5449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5451 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5452 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5453 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5454 IEM_MC_PREPARE_AVX_USAGE();
5455
5456 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5457 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5458 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
5459 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5460 IEM_MC_LOCAL(X86XMMREG, uDst);
5461 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5462 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5463 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5464 puDst, puSrc, bImmArg);
5465 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5466 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5467
5468 IEM_MC_ADVANCE_RIP_AND_FINISH();
5469 IEM_MC_END();
5470 }
5471}
5472
5473
5474/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
5475FNIEMOP_DEF(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib)
5476{
5477 IEMOP_MNEMONIC4(VEX_RVMI, VCMPSD, vcmpsd, Vsd, Hsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5478
5479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5480 if (IEM_IS_MODRM_REG_MODE(bRm))
5481 {
5482 /*
5483 * XMM64, XMM64.
5484 */
5485 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5486 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5487 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5488 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5489 IEM_MC_PREPARE_AVX_USAGE();
5490 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5491 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5492 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
5493 IEM_MC_LOCAL(X86XMMREG, uDst);
5494 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5495 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5496 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5497 puDst, puSrc, bImmArg);
5498 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5499 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5500
5501 IEM_MC_ADVANCE_RIP_AND_FINISH();
5502 IEM_MC_END();
5503 }
5504 else
5505 {
5506 /*
5507 * XMM64, [mem64].
5508 */
5509 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5510
5511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5513 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5514 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5515 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5516 IEM_MC_PREPARE_AVX_USAGE();
5517
5518 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5519 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5520 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
5521 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5522 IEM_MC_LOCAL(X86XMMREG, uDst);
5523 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5524 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5525 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5526 puDst, puSrc, bImmArg);
5527 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5528 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5529
5530 IEM_MC_ADVANCE_RIP_AND_FINISH();
5531 IEM_MC_END();
5532 }
5533}
5534
5535
5536/* Opcode VEX.0F 0xc3 - invalid */
5537/* Opcode VEX.66.0F 0xc3 - invalid */
5538/* Opcode VEX.F3.0F 0xc3 - invalid */
5539/* Opcode VEX.F2.0F 0xc3 - invalid */
5540
5541/* Opcode VEX.0F 0xc4 - invalid */
5542
5543
5544/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
5545FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
5546{
5547 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
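 /* vpinsrw copies the first source operand (VVVV) to the destination and then
 overwrites the word selected by imm8[2:0] with the GPR16/memory value; see
 the two IEM_MC_STORE_XREG_* calls in each branch below. */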
5548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5549 if (IEM_IS_MODRM_REG_MODE(bRm))
5550 {
5551 /*
5552 * Register, register.
5553 */
5554 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5555 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5556 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5557 IEM_MC_LOCAL(uint16_t, uValue);
5558
5559 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5560 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5561 IEM_MC_PREPARE_AVX_USAGE();
5562
5563 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5564 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
5565 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5566 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5567 IEM_MC_ADVANCE_RIP_AND_FINISH();
5568 IEM_MC_END();
5569 }
5570 else
5571 {
5572 /*
5573 * Register, memory.
5574 */
5575 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5577 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5578 IEM_MC_LOCAL(uint16_t, uValue);
5579
5580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5581 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5582 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5583 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5584 IEM_MC_PREPARE_AVX_USAGE();
5585
5586 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5587 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5588 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5589 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5590 IEM_MC_ADVANCE_RIP_AND_FINISH();
5591 IEM_MC_END();
5592 }
5593}
5594
5595
5596/* Opcode VEX.F3.0F 0xc4 - invalid */
5597/* Opcode VEX.F2.0F 0xc4 - invalid */
5598
5599/* Opcode VEX.0F 0xc5 - invalid */
5600
5601
5602/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
5603FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
5604{
5605 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
5606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5607 if (IEM_IS_MODRM_REG_MODE(bRm))
5608 {
5609 /*
5610 * greg32, XMM, imm8.
5611 */
5612 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5613 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5614 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5615 IEM_MC_LOCAL(uint16_t, uValue);
5616 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5617 IEM_MC_PREPARE_AVX_USAGE();
5618 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
5619 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
5620 IEM_MC_ADVANCE_RIP_AND_FINISH();
5621 IEM_MC_END();
5622 }
5623 /* No memory operand. */
5624 else
5625 IEMOP_RAISE_INVALID_OPCODE_RET();
5626}
5627
5628
5629/* Opcode VEX.F3.0F 0xc5 - invalid */
5630/* Opcode VEX.F2.0F 0xc5 - invalid */
5631
5632
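/**
 * Common body for vshufps/vshufpd. For the 128-bit vshufps form the result
 * is, per the SDM:
 * uDst.au32[0] = uSrc1.au32[ bImm & 3];
 * uDst.au32[1] = uSrc1.au32[(bImm >> 2) & 3];
 * uDst.au32[2] = uSrc2.au32[(bImm >> 4) & 3];
 * uDst.au32[3] = uSrc2.au32[(bImm >> 6) & 3];
 * vshufpd uses one selector bit per result qword, and the 256-bit forms
 * repeat the pattern for the high 128-bit lane.
 */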
5633#define VSHUFP_X(a_Instr) \
5634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5635 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5636 { \
5637 /* \
5638 * Register, register. \
5639 */ \
5640 if (pVCpu->iem.s.uVexLength) \
5641 { \
5642 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5643 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5644 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
5645 IEM_MC_LOCAL(RTUINT256U, uDst); \
5646 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
5647 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
5648 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
5649 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
5650 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
5651 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5652 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5653 IEM_MC_PREPARE_AVX_USAGE(); \
5654 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5655 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5656 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
5657 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5658 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5659 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5660 IEM_MC_END(); \
5661 } \
5662 else \
5663 { \
5664 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5665 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5666 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5667 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
5668 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
5669 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
5670 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5671 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5672 IEM_MC_PREPARE_AVX_USAGE(); \
5673 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5674 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5675 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5676 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
5677 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5678 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
5679 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5680 IEM_MC_END(); \
5681 } \
5682 } \
5683 else \
5684 { \
5685 /* \
5686 * Register, memory. \
5687 */ \
5688 if (pVCpu->iem.s.uVexLength) \
5689 { \
5690 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5691 IEM_MC_LOCAL(RTUINT256U, uDst); \
5692 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
5693 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
5694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5695 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
5696 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
5697 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
5698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5699 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5700 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5701 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
5702 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5703 IEM_MC_PREPARE_AVX_USAGE(); \
5704 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5705 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5706 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
5707 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5708 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5709 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5710 IEM_MC_END(); \
5711 } \
5712 else \
5713 { \
5714 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5715 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
5716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5717 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
5718 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
5719 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
5720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5721 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5722 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5723 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5724 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5725 IEM_MC_PREPARE_AVX_USAGE(); \
5726 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5727 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5728 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5729 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
5730 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5731 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
5732 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5733 IEM_MC_END(); \
5734 } \
5735 } \
5736 (void)0
5737
5738/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
5739FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
5740{
5741 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5742 VSHUFP_X(vshufps);
5743}
5744
5745
5746/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
5747FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
5748{
5749 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5750 VSHUFP_X(vshufpd);
5751}
5752#undef VSHUFP_X
5753
5754
5755/* Opcode VEX.F3.0F 0xc6 - invalid */
5756/* Opcode VEX.F2.0F 0xc6 - invalid */
5757
5758/* Opcode VEX.0F 0xc7 - invalid */
5759/* Opcode VEX.66.0F 0xc7 - invalid */
5760/* Opcode VEX.F3.0F 0xc7 - invalid */
5761/* Opcode VEX.F2.0F 0xc7 - invalid */
5762
5763/* Opcode VEX.0F 0xc8 - invalid */
5764/* Opcode VEX.0F 0xc9 - invalid */
5765/* Opcode VEX.0F 0xca - invalid */
5766/* Opcode VEX.0F 0xcb - invalid */
5767/* Opcode VEX.0F 0xcc - invalid */
5768/* Opcode VEX.0F 0xcd - invalid */
5769/* Opcode VEX.0F 0xce - invalid */
5770/* Opcode VEX.0F 0xcf - invalid */
5771
5772
5773/* Opcode VEX.0F 0xd0 - invalid */
5774
5775
5776/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
5777FNIEMOP_DEF(iemOp_vaddsubpd_Vpd_Hpd_Wpd)
5778{
5779 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPD, vaddsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5780 IEMOPMEDIAF3_INIT_VARS( vaddsubpd);
5781 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5782}
5783
5784
5785/* Opcode VEX.F3.0F 0xd0 - invalid */
5786
5787
5788/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
5789FNIEMOP_DEF(iemOp_vaddsubps_Vps_Hps_Wps)
5790{
5791 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPS, vaddsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5792 IEMOPMEDIAF3_INIT_VARS( vaddsubps);
5793 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5794}
5795
5796
5797/* Opcode VEX.0F 0xd1 - invalid */
5798
5799
5800/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
5801FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
5802{
5803 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5804 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
5805 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5806}
5807
5808/* Opcode VEX.F3.0F 0xd1 - invalid */
5809/* Opcode VEX.F2.0F 0xd1 - invalid */
5810
5811/* Opcode VEX.0F 0xd2 - invalid */
5812/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
5813FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
5814{
5815 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5816 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
5817 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5818}
5819
5820/* Opcode VEX.F3.0F 0xd2 - invalid */
5821/* Opcode VEX.F2.0F 0xd2 - invalid */
5822
5823/* Opcode VEX.0F 0xd3 - invalid */
5824/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
5825FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
5826{
5827 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5828 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
5829 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5830}
5831
5832/* Opcode VEX.F3.0F 0xd3 - invalid */
5833/* Opcode VEX.F2.0F 0xd3 - invalid */
5834
5835/* Opcode VEX.0F 0xd4 - invalid */
5836
5837
5838/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, W */
5839FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
5840{
5841 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5842 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
5843 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5844}
5845
5846
5847/* Opcode VEX.F3.0F 0xd4 - invalid */
5848/* Opcode VEX.F2.0F 0xd4 - invalid */
5849
5850/* Opcode VEX.0F 0xd5 - invalid */
5851
5852
5853/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
5854FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
5855{
5856 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5857 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
5858 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5859}
5860
5861
5862/* Opcode VEX.F3.0F 0xd5 - invalid */
5863/* Opcode VEX.F2.0F 0xd5 - invalid */
5864
5865/* Opcode VEX.0F 0xd6 - invalid */
5866
5867/**
5868 * @opcode 0xd6
5869 * @oppfx 0x66
5870 * @opcpuid avx
5871 * @opgroup og_avx_pcksclr_datamove
5872 * @opxcpttype none
5873 * @optest op1=-1 op2=2 -> op1=2
5874 * @optest op1=0 op2=-42 -> op1=-42
5875 */
5876FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
5877{
5878 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5880 if (IEM_IS_MODRM_REG_MODE(bRm))
5881 {
5882 /*
5883 * Register, register.
5884 */
5885 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5886 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5887
5888 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5889 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5890
5891 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5892 IEM_GET_MODRM_REG(pVCpu, bRm));
5893 IEM_MC_ADVANCE_RIP_AND_FINISH();
5894 IEM_MC_END();
5895 }
5896 else
5897 {
5898 /*
5899 * Memory, register.
5900 */
5901 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5902 IEM_MC_LOCAL(uint64_t, uSrc);
5903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5904
5905 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5906 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5907 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5908 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5909
5910 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5911 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5912
5913 IEM_MC_ADVANCE_RIP_AND_FINISH();
5914 IEM_MC_END();
5915 }
5916}
5917
5918/* Opcode VEX.F3.0F 0xd6 - invalid */
5919/* Opcode VEX.F2.0F 0xd6 - invalid */
5920
5921
5922/* Opcode VEX.0F 0xd7 - invalid */
5923
5924/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
5925FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
5926{
5927 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5928 /* Docs say register only. */
5929 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
5930 {
5931 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
5932 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
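 /* Each source byte contributes its most significant bit: 16 mask bits for
 the 128-bit form, 32 for the 256-bit one; per the note above, the helper
 writes the whole 64-bit register with the mask zero-extended. */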
5933 if (pVCpu->iem.s.uVexLength)
5934 {
5935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5936 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
5937 IEM_MC_ARG(uint64_t *, puDst, 0);
5938 IEM_MC_LOCAL(RTUINT256U, uSrc);
5939 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
5940 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5941 IEM_MC_PREPARE_AVX_USAGE();
5942 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5943 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5944 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
5945 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
5946 IEM_MC_ADVANCE_RIP_AND_FINISH();
5947 IEM_MC_END();
5948 }
5949 else
5950 {
5951 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5952 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5953 IEM_MC_ARG(uint64_t *, puDst, 0);
5954 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5955 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5956 IEM_MC_PREPARE_AVX_USAGE();
5957 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5958 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5959 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
5960 IEM_MC_ADVANCE_RIP_AND_FINISH();
5961 IEM_MC_END();
5962 }
5963 }
5964 else
5965 IEMOP_RAISE_INVALID_OPCODE_RET();
5966}
5967
5968
5969/* Opcode VEX.F3.0F 0xd7 - invalid */
5970/* Opcode VEX.F2.0F 0xd7 - invalid */
5971
5972
5973/* Opcode VEX.0F 0xd8 - invalid */
5974
5975/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5976FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5977{
5978 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5979 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5980 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5981}
5982
5983
5984/* Opcode VEX.F3.0F 0xd8 - invalid */
5985/* Opcode VEX.F2.0F 0xd8 - invalid */
5986
5987/* Opcode VEX.0F 0xd9 - invalid */
5988
5989
5990/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5991FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5992{
5993 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5994 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5995 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5996}
5997
5998
5999/* Opcode VEX.F3.0F 0xd9 - invalid */
6000/* Opcode VEX.F2.0F 0xd9 - invalid */
6001
6002/* Opcode VEX.0F 0xda - invalid */
6003
6004
6005/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
6006FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
6007{
6008 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6009 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
6010 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6011}
6012
6013
6014/* Opcode VEX.F3.0F 0xda - invalid */
6015/* Opcode VEX.F2.0F 0xda - invalid */
6016
6017/* Opcode VEX.0F 0xdb - invalid */
6018
6019
6020/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
6021FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
6022{
6023 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
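 /* Note: the bitwise ops (vpand/vpandn/vpor/vpxor) reference pre-built global
 implementation tables instead of declaring IEMOPMEDIAOPTF3_INIT_VARS locals
 like the arithmetic ops above. */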
6024 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6025 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
6026}
6027
6028
6029/* Opcode VEX.F3.0F 0xdb - invalid */
6030/* Opcode VEX.F2.0F 0xdb - invalid */
6031
6032/* Opcode VEX.0F 0xdc - invalid */
6033
6034
6035/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
6036FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
6037{
6038 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6039 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
6040 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6041}
6042
6043
6044/* Opcode VEX.F3.0F 0xdc - invalid */
6045/* Opcode VEX.F2.0F 0xdc - invalid */
6046
6047/* Opcode VEX.0F 0xdd - invalid */
6048
6049
6050/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
6051FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
6052{
6053 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6054 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
6055 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6056}
6057
6058
6059/* Opcode VEX.F3.0F 0xdd - invalid */
6060/* Opcode VEX.F2.0F 0xdd - invalid */
6061
6062/* Opcode VEX.0F 0xde - invalid */
6063
6064
6065/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
6066FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
6067{
6068 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6069 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
6070 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6071}
6072
6073
6074/* Opcode VEX.F3.0F 0xde - invalid */
6075/* Opcode VEX.F2.0F 0xde - invalid */
6076
6077/* Opcode VEX.0F 0xdf - invalid */
6078
6079
6080/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
6081FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
6082{
6083 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6084 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6085 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
6086}
6087
6088
6089/* Opcode VEX.F3.0F 0xdf - invalid */
6090/* Opcode VEX.F2.0F 0xdf - invalid */
6091
6092/* Opcode VEX.0F 0xe0 - invalid */
6093
6094
6095/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
6096FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
6097{
6098 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6099 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
6100 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6101}
6102
6103
6104/* Opcode VEX.F3.0F 0xe0 - invalid */
6105/* Opcode VEX.F2.0F 0xe0 - invalid */
6106
6107/* Opcode VEX.0F 0xe1 - invalid */
6108/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
6109FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
6110{
6111 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6112 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
6113 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6114}
6115
6116/* Opcode VEX.F3.0F 0xe1 - invalid */
6117/* Opcode VEX.F2.0F 0xe1 - invalid */
6118
6119/* Opcode VEX.0F 0xe2 - invalid */
6120/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
6121FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
6122{
6123 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6124 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
6125 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6126}
6127
6128/* Opcode VEX.F3.0F 0xe2 - invalid */
6129/* Opcode VEX.F2.0F 0xe2 - invalid */
6130
6131/* Opcode VEX.0F 0xe3 - invalid */
6132
6133
6134/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
6135FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
6136{
6137 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6138 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
6139 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6140}
6141
6142
6143/* Opcode VEX.F3.0F 0xe3 - invalid */
6144/* Opcode VEX.F2.0F 0xe3 - invalid */
6145
6146/* Opcode VEX.0F 0xe4 - invalid */
6147
6148
6149/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
6150FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
6151{
6152 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6153 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
6154 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6155}
6156
6157
6158/* Opcode VEX.F3.0F 0xe4 - invalid */
6159/* Opcode VEX.F2.0F 0xe4 - invalid */
6160
6161/* Opcode VEX.0F 0xe5 - invalid */
6162
6163
6164/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
6165FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
6166{
6167 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6168 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
6169 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6170}
6171
6172
6173/* Opcode VEX.F3.0F 0xe5 - invalid */
6174/* Opcode VEX.F2.0F 0xe5 - invalid */
6175
6176/* Opcode VEX.0F 0xe6 - invalid */
6177/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
6178FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
6179/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
6180FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
6181/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
6182FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
6183
6184
6185/* Opcode VEX.0F 0xe7 - invalid */
6186
6187/**
6188 * @opcode 0xe7
6189 * @opcodesub !11 mr/reg
6190 * @oppfx 0x66
6191 * @opcpuid avx
6192 * @opgroup og_avx_cachect
6193 * @opxcpttype 1
6194 * @optest op1=-1 op2=2 -> op1=2
6195 * @optest op1=0 op2=-42 -> op1=-42
6196 */
6197FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
6198{
6199 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6200 Assert(pVCpu->iem.s.uVexLength <= 1);
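 /* Note: the non-temporal hint is not modelled here; the stores below behave
 as ordinary aligned stores and raise \#GP on misaligned operands. */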
6201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6202 if (IEM_IS_MODRM_MEM_MODE(bRm))
6203 {
6204 if (pVCpu->iem.s.uVexLength == 0)
6205 {
6206 /*
6207 * 128-bit: Memory, register.
6208 */
6209 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6210 IEM_MC_LOCAL(RTUINT128U, uSrc);
6211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6212
6213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6214 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6215 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6216 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6217
6218 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
6219 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6220
6221 IEM_MC_ADVANCE_RIP_AND_FINISH();
6222 IEM_MC_END();
6223 }
6224 else
6225 {
6226 /*
6227 * 256-bit: Memory, register.
6228 */
6229 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6230 IEM_MC_LOCAL(RTUINT256U, uSrc);
6231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6232
6233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6234 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6235 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6236 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6237
6238 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6239 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6240
6241 IEM_MC_ADVANCE_RIP_AND_FINISH();
6242 IEM_MC_END();
6243 }
6244 }
6245 /**
6246 * @opdone
6247 * @opmnemonic udvex660fe7reg
6248 * @opcode 0xe7
6249 * @opcodesub 11 mr/reg
6250 * @oppfx 0x66
6251 * @opunused immediate
6252 * @opcpuid avx
6253 * @optest ->
6254 */
6255 else
6256 IEMOP_RAISE_INVALID_OPCODE_RET();
6257}
6258
6259/* Opcode VEX.F3.0F 0xe7 - invalid */
6260/* Opcode VEX.F2.0F 0xe7 - invalid */
6261
6262
6263/* Opcode VEX.0F 0xe8 - invalid */
6264
6265
6266/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
6267FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
6268{
6269 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6270 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
6271 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6272}
6273
6274
6275/* Opcode VEX.F3.0F 0xe8 - invalid */
6276/* Opcode VEX.F2.0F 0xe8 - invalid */
6277
6278/* Opcode VEX.0F 0xe9 - invalid */
6279
6280
6281/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
6282FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
6283{
6284 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6285 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
6286 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6287}
6288
6289
6290/* Opcode VEX.F3.0F 0xe9 - invalid */
6291/* Opcode VEX.F2.0F 0xe9 - invalid */
6292
6293/* Opcode VEX.0F 0xea - invalid */
6294
6295
6296/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
6297FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
6298{
6299 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6300 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
6301 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6302}
6303
6304
6305/* Opcode VEX.F3.0F 0xea - invalid */
6306/* Opcode VEX.F2.0F 0xea - invalid */
6307
6308/* Opcode VEX.0F 0xeb - invalid */
6309
6310
6311/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
6312FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
6313{
6314 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6315 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6316 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
6317}
6318
6319
6320
6321/* Opcode VEX.F3.0F 0xeb - invalid */
6322/* Opcode VEX.F2.0F 0xeb - invalid */
6323
6324/* Opcode VEX.0F 0xec - invalid */
6325
6326
6327/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
6328FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
6329{
6330 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6331 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
6332 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6333}
6334
6335
6336/* Opcode VEX.F3.0F 0xec - invalid */
6337/* Opcode VEX.F2.0F 0xec - invalid */
6338
6339/* Opcode VEX.0F 0xed - invalid */
6340
6341
6342/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
6343FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
6344{
6345 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6346 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
6347 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6348}
6349
6350
6351/* Opcode VEX.F3.0F 0xed - invalid */
6352/* Opcode VEX.F2.0F 0xed - invalid */
6353
6354/* Opcode VEX.0F 0xee - invalid */
6355
6356
6357/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
6358FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
6359{
6360 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6361 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
6362 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6363}
6364
6365
6366/* Opcode VEX.F3.0F 0xee - invalid */
6367/* Opcode VEX.F2.0F 0xee - invalid */
6368
6369
6370/* Opcode VEX.0F 0xef - invalid */
6371
6372
6373/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
6374FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
6375{
6376 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6377 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6378 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
6379}
6380
6381
6382/* Opcode VEX.F3.0F 0xef - invalid */
6383/* Opcode VEX.F2.0F 0xef - invalid */
6384
6385/* Opcode VEX.0F 0xf0 - invalid */
6386/* Opcode VEX.66.0F 0xf0 - invalid */
6387
6388
6389/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
6390FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
6391{
6392 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6393 Assert(pVCpu->iem.s.uVexLength <= 1);
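 /* vlddqu never faults on unaligned operands, hence the *_NO_AC fetches
 below. */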
6394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6395 if (IEM_IS_MODRM_REG_MODE(bRm))
6396 {
6397 /*
6398 * Register, register - (not implemented, assuming it raises \#UD).
6399 */
6400 IEMOP_RAISE_INVALID_OPCODE_RET();
6401 }
6402 else if (pVCpu->iem.s.uVexLength == 0)
6403 {
6404 /*
6405 * Register, memory128.
6406 */
6407 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6408 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6410
6411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6412 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6413 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6414 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6415
6416 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6417 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6418
6419 IEM_MC_ADVANCE_RIP_AND_FINISH();
6420 IEM_MC_END();
6421 }
6422 else
6423 {
6424 /*
6425 * Register, memory256.
6426 */
6427 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6428 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
6429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6430
6431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6432 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6433 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6434 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6435
6436 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6437 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
6438
6439 IEM_MC_ADVANCE_RIP_AND_FINISH();
6440 IEM_MC_END();
6441 }
6442}
6443
6444
6445/* Opcode VEX.0F 0xf1 - invalid */
6446/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
6447FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
6448{
6449 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6450 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
6451 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6452}
6453
6454/* Opcode VEX.F2.0F 0xf1 - invalid */
6455
6456/* Opcode VEX.0F 0xf2 - invalid */
6457/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
6458FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
6459{
6460 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6461 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
6462 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6463}
6464/* Opcode VEX.F2.0F 0xf2 - invalid */
6465
6466/* Opcode VEX.0F 0xf3 - invalid */
6467/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
6468FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
6469{
6470 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6471 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
6472 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6473}
6474/* Opcode VEX.F2.0F 0xf3 - invalid */
6475
6476/* Opcode VEX.0F 0xf4 - invalid */
6477
6478
6479/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
6480FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
6481{
6482 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6483 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
6484 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6485}
6486
6487
6488/* Opcode VEX.F2.0F 0xf4 - invalid */
6489
6490/* Opcode VEX.0F 0xf5 - invalid */
6491
6492
6493/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
6494FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
6495{
6496 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6497 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
6498 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6499}


/* Opcode VEX.F2.0F 0xf5 - invalid */

/* Opcode VEX.0F 0xf6 - invalid */


/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
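/* Semantics sketch for the handler above (illustrative, per the architectural
 * vpsadbw definition): each qword lane receives the sum of absolute
 * differences of its eight byte pairs, zero-extended to 64 bits:
 *     dst.u64[n] = sum(i = 0..7, abs(src1.u8[8*n + i] - src2.u8[8*n + i]));
 */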


/* Opcode VEX.F2.0F 0xf6 - invalid */

/* Opcode VEX.0F 0xf7 - invalid */


/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
{
//  IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, (implicit) [E/R]DI
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_LOCAL(uint64_t, u64EffAddr);
        IEM_MC_LOCAL(RTUINT128U, u128Mem);
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG(PCRTUINT128U, puMsk, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
        IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* The memory, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
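/* Usage note for the handler above (illustrative): vmaskmovdqu does a
 * byte-granular masked store of the first register operand to ds:[e/rdi]
 * (segment overridable, hence the iEffSeg use above), selecting bytes by the
 * most significant bit of each mask byte:
 *     vmaskmovdqu xmm1, xmm2      ; store xmm1 bytes where xmm2.u8[i] & 0x80
 */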


/* Opcode VEX.F2.0F 0xf7 - invalid */

/* Opcode VEX.0F 0xf8 - invalid */


/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
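/* The vpsubb/vpsubw/vpsubd/vpsubq handlers that follow differ only in element
 * width; e.g. (illustrative) vpsubb computes, without saturation,
 *     dst.u8[i] = src1.u8[i] - src2.u8[i];
 * for all 16 (VEX.128) or 32 (VEX.256) byte lanes. */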


/* Opcode VEX.F2.0F 0xf8 - invalid */

/* Opcode VEX.0F 0xf9 - invalid */


/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf9 - invalid */

/* Opcode VEX.0F 0xfa - invalid */


/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfa - invalid */

/* Opcode VEX.0F 0xfb - invalid */

/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfb - invalid */

/* Opcode VEX.0F 0xfc - invalid */


/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfc - invalid */

/* Opcode VEX.0F 0xfd - invalid */


/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfd - invalid */

/* Opcode VEX.0F 0xfe - invalid */

/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_vud0)
{
/** @todo testcase: vud0 */
    IEMOP_MNEMONIC(vud0, "vud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
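/* Note on the vendor check above: the Intel path fetches a ModR/M byte (and
 * skips any addressing bytes it implies) for UD0 before raising #UD, while
 * the non-Intel path raises #UD without consuming a ModR/M byte. */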


/**
 * VEX opcode map \#1.
 *
 * @sa g_apfnTwoByteMap
 */
const PFNIEMOP g_apfnVexMap1[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
};
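/* Layout note (illustrative): each of the 256 opcode rows above expands to
 * four entries, one per mandatory-prefix column (none, 066h, 0f3h, 0f2h),
 * which is what the AssertCompile below checks: 256 * 4 == 1024. */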
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
/** @} */
