VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@105318

Last change on this file since 105318 was 105316, checked in by vboxsync, 7 months ago

VMM/IEM: Implement vcvtps2pd instruction emulation, bugref:9898

1/* $Id: IEMAllInstVexMap1.cpp.h 105316 2024-07-12 18:50:11Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX/AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
54 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
55 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
56 IEM_MC_PREPARE_AVX_USAGE();
57
58 IEM_MC_LOCAL(X86YMMREG, uSrc1);
59 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
60 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
61 IEM_MC_LOCAL(X86YMMREG, uSrc2);
62 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
63 IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
64 IEM_MC_LOCAL(X86YMMREG, uDst);
65 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
66 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
67 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
68 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
69 IEM_MC_ADVANCE_RIP_AND_FINISH();
70 IEM_MC_END();
71 }
72 else
73 {
74 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
75 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
76 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
77 IEM_MC_PREPARE_AVX_USAGE();
78
79 IEM_MC_LOCAL(X86XMMREG, uDst);
80 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
81 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
82 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
83 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
84 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
85 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
86 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
87 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
88 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
89 IEM_MC_ADVANCE_RIP_AND_FINISH();
90 IEM_MC_END();
91 }
92 }
93 else
94 {
95 /*
96 * Register, memory.
97 */
98 if (pVCpu->iem.s.uVexLength)
99 {
100 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
103 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
105 IEM_MC_PREPARE_AVX_USAGE();
106
107 IEM_MC_LOCAL(X86YMMREG, uSrc2);
108 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
109 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
110 IEM_MC_LOCAL(X86YMMREG, uSrc1);
111 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
112 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
113 IEM_MC_LOCAL(X86YMMREG, uDst);
114 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
115 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
116 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
117 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
118 IEM_MC_ADVANCE_RIP_AND_FINISH();
119 IEM_MC_END();
120 }
121 else
122 {
123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
126 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
128 IEM_MC_PREPARE_AVX_USAGE();
129
130 IEM_MC_LOCAL(X86XMMREG, uDst);
131 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
132 IEM_MC_LOCAL(X86XMMREG, uSrc2);
133 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
134 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
135 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
136 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
137
138 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
139 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
140 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
141 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
142 IEM_MC_ADVANCE_RIP_AND_FINISH();
143 IEM_MC_END();
144 }
145 }
146}
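/*
 * For reference, a packed math instruction typically routes into the worker
 * above by building a host/fallback implementation table and forwarding the
 * call.  A minimal sketch, assuming a vaddps handler shaped like the
 * vunpcklps dispatch further down in this file (opcode 0x14):
 *
 *     FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
 *         IEMOPMEDIAF3_INIT_VARS(vaddps);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
 *     }
 */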
147
148
149/**
150 * Common worker for scalar AVX instructions on the forms (vaddss, vsubss, etc.):
151 * - vxxxss xmm0, xmm1, xmm2/mem32
152 *
153 * Exceptions type 3. AVX cpuid check for 128-bit operation.
154 * Ignores VEX.L; from the SDM:
155 * Software should ensure VADDSS is encoded with VEX.L=0.
156 * Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
157 * across different processor generations.
158 */
159FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
160{
161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
162 if (IEM_IS_MODRM_REG_MODE(bRm))
163 {
164 /*
165 * Register, register.
166 */
167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
168 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
169 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
170 IEM_MC_PREPARE_AVX_USAGE();
171
172 IEM_MC_LOCAL(X86XMMREG, uDst);
173 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
174 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
175 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
176 IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
177 IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
178 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
179 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
180 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
181 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
182 IEM_MC_ADVANCE_RIP_AND_FINISH();
183 IEM_MC_END();
184 }
185 else
186 {
187 /*
188 * Register, memory.
189 */
190 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
194 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
195 IEM_MC_PREPARE_AVX_USAGE();
196
197 IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
198 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
199 IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
200 IEM_MC_LOCAL(X86XMMREG, uDst);
201 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
202 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
203 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
204 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
205 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
206 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
207 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
208 IEM_MC_ADVANCE_RIP_AND_FINISH();
209 IEM_MC_END();
210 }
211}
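/*
 * A minimal sketch of the expected dispatch into the worker above - the
 * handler name and the iemAImpl_* implementation pair are assumptions here,
 * following the IEM_SELECT_HOST_OR_FALLBACK pattern used elsewhere in the file:
 *
 *     FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vss, Hss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
 *     }
 */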
212
213
214/**
215 * Common worker for scalar AVX instructions on the forms (vaddsd, vsubsd, etc.):
216 * - vxxxsd xmm0, xmm1, xmm2/mem64
217 *
218 * Exceptions type 3. AVX cpuid check for 128-bit operation.
219 * Ignores VEX.L; from the SDM:
220 * Software should ensure VADDSD is encoded with VEX.L=0.
221 * Encoding VADDSD with VEX.L=1 may encounter unpredictable behavior
222 * across different processor generations.
223 */
224FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R64, PFNIEMAIMPLFPAVXF3U128R64, pfnU128)
225{
226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
227 if (IEM_IS_MODRM_REG_MODE(bRm))
228 {
229 /*
230 * Register, register.
231 */
232 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
233 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
234 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
235 IEM_MC_PREPARE_AVX_USAGE();
236
237 IEM_MC_LOCAL(X86XMMREG, uDst);
238 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
239 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
240 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
241 IEM_MC_ARG(PCRTFLOAT64U, pr64Src2, 2);
242 IEM_MC_REF_XREG_R64_CONST(pr64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
243 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
244 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
245 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
246 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
247 IEM_MC_ADVANCE_RIP_AND_FINISH();
248 IEM_MC_END();
249 }
250 else
251 {
252 /*
253 * Register, memory.
254 */
255 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
258 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
260 IEM_MC_PREPARE_AVX_USAGE();
261
262 IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
263 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
264 IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
265 IEM_MC_LOCAL(X86XMMREG, uDst);
266 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
267 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
268 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
269 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
270 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
271 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
272 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
273 IEM_MC_ADVANCE_RIP_AND_FINISH();
274 IEM_MC_END();
275 }
276}
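/*
 * The 64-bit scalar worker above is dispatched the same way as the 32-bit one;
 * e.g. (assumed sketch) a vaddsd handler would pass
 * IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback)
 * straight through to iemOpCommonAvx_Vx_Hx_R64.
 */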
277
278
279/**
280 * Common worker for AVX/AVX2 instructions on the forms:
281 * - vpxxx xmm0, xmm1, xmm2/mem128
282 * - vpxxx ymm0, ymm1, ymm2/mem256
283 *
284 * Takes function table for function w/o implicit state parameter.
285 *
286 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
287 */
288FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
289{
290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
291 if (IEM_IS_MODRM_REG_MODE(bRm))
292 {
293 /*
294 * Register, register.
295 */
296 if (pVCpu->iem.s.uVexLength)
297 {
298 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
299 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
300 IEM_MC_LOCAL(RTUINT256U, uDst);
301 IEM_MC_LOCAL(RTUINT256U, uSrc1);
302 IEM_MC_LOCAL(RTUINT256U, uSrc2);
303 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
304 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
305 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
307 IEM_MC_PREPARE_AVX_USAGE();
308 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
309 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
310 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
311 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
312 IEM_MC_ADVANCE_RIP_AND_FINISH();
313 IEM_MC_END();
314 }
315 else
316 {
317 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
318 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
319 IEM_MC_ARG(PRTUINT128U, puDst, 0);
320 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
321 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
323 IEM_MC_PREPARE_AVX_USAGE();
324 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
325 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
326 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
327 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
328 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
329 IEM_MC_ADVANCE_RIP_AND_FINISH();
330 IEM_MC_END();
331 }
332 }
333 else
334 {
335 /*
336 * Register, memory.
337 */
338 if (pVCpu->iem.s.uVexLength)
339 {
340 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
341 IEM_MC_LOCAL(RTUINT256U, uDst);
342 IEM_MC_LOCAL(RTUINT256U, uSrc1);
343 IEM_MC_LOCAL(RTUINT256U, uSrc2);
344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
345 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
346 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
347 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
348
349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
350 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
352 IEM_MC_PREPARE_AVX_USAGE();
353
354 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
355 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
356 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
357 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
358
359 IEM_MC_ADVANCE_RIP_AND_FINISH();
360 IEM_MC_END();
361 }
362 else
363 {
364 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
365 IEM_MC_LOCAL(RTUINT128U, uSrc2);
366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
367 IEM_MC_ARG(PRTUINT128U, puDst, 0);
368 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
369 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
370
371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
372 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
373 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
374 IEM_MC_PREPARE_AVX_USAGE();
375
376 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
377 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
378 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
379 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
380 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
381
382 IEM_MC_ADVANCE_RIP_AND_FINISH();
383 IEM_MC_END();
384 }
385 }
386}
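/*
 * Dispatch into the worker above follows the IEMOPMEDIAOPTF3_INIT_VARS pattern
 * that the vunpcklps/vunpcklpd handlers below use (via the _LowSrc wrapper).
 * A minimal sketch, with vpand assumed as the example instruction:
 *
 *     FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
 *         IEMOPMEDIAOPTF3_INIT_VARS(vpand);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */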
387
388
389/**
390 * Common worker for AVX/AVX2 instructions on the forms:
391 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
392 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
393 *
394 * The 128-bit memory version of this instruction may elect to skip fetching the
395 * lower 64 bits of the operand. We, however, do not.
396 *
397 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
398 */
399FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
400{
401 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
402}
403
404
405/**
406 * Common worker for AVX/AVX2 instructions on the forms:
407 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
408 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
409 *
410 * The 128-bit memory version of this instruction may elect to skip fetching the
411 * higher 64 bits of the operand. We, however, do not.
412 *
413 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
414 */
415FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
416{
417 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
418}
419
420
421/**
422 * Common worker for AVX/AVX2 instructions on the forms:
423 * - vpxxx xmm0, xmm1/mem128
424 * - vpxxx ymm0, ymm1/mem256
425 *
426 * Takes function table for function w/o implicit state parameter.
427 *
428 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
429 */
430FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
431{
432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
433 if (IEM_IS_MODRM_REG_MODE(bRm))
434 {
435 /*
436 * Register, register.
437 */
438 if (pVCpu->iem.s.uVexLength)
439 {
440 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
441 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
442 IEM_MC_LOCAL(RTUINT256U, uDst);
443 IEM_MC_LOCAL(RTUINT256U, uSrc);
444 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
445 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
446 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
447 IEM_MC_PREPARE_AVX_USAGE();
448 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
449 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
450 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
451 IEM_MC_ADVANCE_RIP_AND_FINISH();
452 IEM_MC_END();
453 }
454 else
455 {
456 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
457 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
458 IEM_MC_ARG(PRTUINT128U, puDst, 0);
459 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
460 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
461 IEM_MC_PREPARE_AVX_USAGE();
462 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
463 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
464 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
465 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
466 IEM_MC_ADVANCE_RIP_AND_FINISH();
467 IEM_MC_END();
468 }
469 }
470 else
471 {
472 /*
473 * Register, memory.
474 */
475 if (pVCpu->iem.s.uVexLength)
476 {
477 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
478 IEM_MC_LOCAL(RTUINT256U, uDst);
479 IEM_MC_LOCAL(RTUINT256U, uSrc);
480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
481 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
482 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
483
484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
485 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
486 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
487 IEM_MC_PREPARE_AVX_USAGE();
488
489 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
490 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
491 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
492
493 IEM_MC_ADVANCE_RIP_AND_FINISH();
494 IEM_MC_END();
495 }
496 else
497 {
498 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
499 IEM_MC_LOCAL(RTUINT128U, uSrc);
500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
501 IEM_MC_ARG(PRTUINT128U, puDst, 0);
502 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
503
504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
505 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
506 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
507 IEM_MC_PREPARE_AVX_USAGE();
508
509 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
510 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
511 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
512 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
513
514 IEM_MC_ADVANCE_RIP_AND_FINISH();
515 IEM_MC_END();
516 }
517 }
518}
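/*
 * Two-operand handlers reach the worker above the same way as the
 * three-operand ones: build a PCIEMOPMEDIAOPTF2 table with host and fallback
 * entries and forward it with FNIEMOP_CALL_1 (sketch only; no specific map-1
 * user is assumed here).
 */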
519
520
521/**
522 * Common worker for AVX/AVX2 instructions on the forms:
523 * - vpxxx xmm0, xmm1/mem128
524 * - vpxxx ymm0, ymm1/mem256
525 *
526 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
527 */
528FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx, PCIEMOPMEDIAF2, pImpl)
529{
530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
531 if (IEM_IS_MODRM_REG_MODE(bRm))
532 {
533 /*
534 * Register, register.
535 */
536 if (pVCpu->iem.s.uVexLength)
537 {
538 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
539 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
540 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
541 IEM_MC_PREPARE_AVX_USAGE();
542
543 IEM_MC_LOCAL(X86YMMREG, uSrc);
544 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
545 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
546 IEM_MC_LOCAL(X86YMMREG, uDst);
547 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
548 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
549 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
550 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
551 IEM_MC_ADVANCE_RIP_AND_FINISH();
552 IEM_MC_END();
553 }
554 else
555 {
556 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
557 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
558 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
559 IEM_MC_PREPARE_AVX_USAGE();
560
561 IEM_MC_LOCAL(X86XMMREG, uDst);
562 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
563 IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
564 IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
565 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
566 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
567 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
568 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
569 IEM_MC_ADVANCE_RIP_AND_FINISH();
570 IEM_MC_END();
571 }
572 }
573 else
574 {
575 /*
576 * Register, memory.
577 */
578 if (pVCpu->iem.s.uVexLength)
579 {
580 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
583 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
584 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
585 IEM_MC_PREPARE_AVX_USAGE();
586
587 IEM_MC_LOCAL(X86YMMREG, uSrc);
588 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
589 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
590 IEM_MC_LOCAL(X86YMMREG, uDst);
591 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
592 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
594 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
595 IEM_MC_ADVANCE_RIP_AND_FINISH();
596 IEM_MC_END();
597 }
598 else
599 {
600 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
603 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
604 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
605 IEM_MC_PREPARE_AVX_USAGE();
606
607 IEM_MC_LOCAL(X86XMMREG, uDst);
608 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
609 IEM_MC_LOCAL(X86XMMREG, uSrc);
610 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
611 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
612 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
613 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
614 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
615 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
616 IEM_MC_ADVANCE_RIP_AND_FINISH();
617 IEM_MC_END();
618 }
619 }
620}
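/*
 * A minimal sketch of dispatch into the FP worker above, with vsqrtps assumed
 * as the example instruction (same table/forward shape as the other workers):
 *
 *     FNIEMOP_DEF(iemOp_vsqrtps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(VEX_RM, VSQRTPS, vsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
 *         IEMOPMEDIAF2_INIT_VARS(vsqrtps);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
 *     }
 */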
621
622
623
624/* Opcode VEX.0F 0x00 - invalid */
625/* Opcode VEX.0F 0x01 - invalid */
626/* Opcode VEX.0F 0x02 - invalid */
627/* Opcode VEX.0F 0x03 - invalid */
628/* Opcode VEX.0F 0x04 - invalid */
629/* Opcode VEX.0F 0x05 - invalid */
630/* Opcode VEX.0F 0x06 - invalid */
631/* Opcode VEX.0F 0x07 - invalid */
632/* Opcode VEX.0F 0x08 - invalid */
633/* Opcode VEX.0F 0x09 - invalid */
634/* Opcode VEX.0F 0x0a - invalid */
635
636/** Opcode VEX.0F 0x0b. */
637FNIEMOP_DEF(iemOp_vud2)
638{
639 IEMOP_MNEMONIC(vud2, "vud2");
640 IEMOP_RAISE_INVALID_OPCODE_RET();
641}
642
643/* Opcode VEX.0F 0x0c - invalid */
644/* Opcode VEX.0F 0x0d - invalid */
645/* Opcode VEX.0F 0x0e - invalid */
646/* Opcode VEX.0F 0x0f - invalid */
647
648
649/**
650 * @opcode 0x10
651 * @oppfx none
652 * @opcpuid avx
653 * @opgroup og_avx_simdfp_datamove
654 * @opxcpttype 4UA
655 * @optest op1=1 op2=2 -> op1=2
656 * @optest op1=0 op2=-22 -> op1=-22
657 */
658FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
659{
660 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
661 Assert(pVCpu->iem.s.uVexLength <= 1);
662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
663 if (IEM_IS_MODRM_REG_MODE(bRm))
664 {
665 /*
666 * Register, register.
667 */
668 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
669 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
670 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
671 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
672 if (pVCpu->iem.s.uVexLength == 0)
673 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
674 IEM_GET_MODRM_RM(pVCpu, bRm));
675 else
676 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
677 IEM_GET_MODRM_RM(pVCpu, bRm));
678 IEM_MC_ADVANCE_RIP_AND_FINISH();
679 IEM_MC_END();
680 }
681 else if (pVCpu->iem.s.uVexLength == 0)
682 {
683 /*
684 * 128-bit: Register, Memory
685 */
686 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
687 IEM_MC_LOCAL(RTUINT128U, uSrc);
688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
689
690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
691 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
692 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
693 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
694
695 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
696 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
697
698 IEM_MC_ADVANCE_RIP_AND_FINISH();
699 IEM_MC_END();
700 }
701 else
702 {
703 /*
704 * 256-bit: Register, Memory
705 */
706 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
707 IEM_MC_LOCAL(RTUINT256U, uSrc);
708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
709
710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
712 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
713 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
714
715 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
716 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
717
718 IEM_MC_ADVANCE_RIP_AND_FINISH();
719 IEM_MC_END();
720 }
721}
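/*
 * Note the VLMAX zero extension in both load paths above: with VEX.L=0 the
 * 128-bit load clears bits 255:128 of the destination register (e.g. vmovups
 * xmm1, [mem] zeroes ymm1[255:128]), while VEX.L=1 writes the full 256 bits.
 */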
722
723
724/**
725 * @opcode 0x10
726 * @oppfx 0x66
727 * @opcpuid avx
728 * @opgroup og_avx_simdfp_datamove
729 * @opxcpttype 4UA
730 * @optest op1=1 op2=2 -> op1=2
731 * @optest op1=0 op2=-22 -> op1=-22
732 */
733FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
734{
735 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
736 Assert(pVCpu->iem.s.uVexLength <= 1);
737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
738 if (IEM_IS_MODRM_REG_MODE(bRm))
739 {
740 /*
741 * Register, register.
742 */
743 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
744 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
745 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
746 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
747 if (pVCpu->iem.s.uVexLength == 0)
748 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
749 IEM_GET_MODRM_RM(pVCpu, bRm));
750 else
751 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
752 IEM_GET_MODRM_RM(pVCpu, bRm));
753 IEM_MC_ADVANCE_RIP_AND_FINISH();
754 IEM_MC_END();
755 }
756 else if (pVCpu->iem.s.uVexLength == 0)
757 {
758 /*
759 * 128-bit: Register, memory.
760 */
761 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
762 IEM_MC_LOCAL(RTUINT128U, uSrc);
763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
764
765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
766 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
767 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
768 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
769
770 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
771 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
772
773 IEM_MC_ADVANCE_RIP_AND_FINISH();
774 IEM_MC_END();
775 }
776 else
777 {
778 /*
779 * 256-bit: Register, memory.
780 */
781 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
782 IEM_MC_LOCAL(RTUINT256U, uSrc);
783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
784
785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
786 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
787 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
788 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
789
790 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
791 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
792
793 IEM_MC_ADVANCE_RIP_AND_FINISH();
794 IEM_MC_END();
795 }
796}
797
798
799FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
800{
801 Assert(pVCpu->iem.s.uVexLength <= 1);
802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
803 if (IEM_IS_MODRM_REG_MODE(bRm))
804 {
805 /**
806 * @opcode 0x10
807 * @oppfx 0xf3
808 * @opcodesub 11 mr/reg
809 * @opcpuid avx
810 * @opgroup og_avx_simdfp_datamerge
811 * @opxcpttype 5
812 * @optest op1=1 op2=0 op3=2 -> op1=2
813 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
814 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
815 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
816 * @note HssHi refers to bits 127:32.
817 */
818 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
819 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
820 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
822 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
823 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
824 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
825 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
826 IEM_MC_ADVANCE_RIP_AND_FINISH();
827 IEM_MC_END();
828 }
829 else
830 {
831 /**
832 * @opdone
833 * @opcode 0x10
834 * @oppfx 0xf3
835 * @opcodesub !11 mr/reg
836 * @opcpuid avx
837 * @opgroup og_avx_simdfp_datamove
838 * @opxcpttype 5
839 * @opfunction iemOp_vmovss_Vss_Hss_Wss
840 * @optest op1=1 op2=2 -> op1=2
841 * @optest op1=0 op2=-22 -> op1=-22
842 */
843 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
844 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
845 IEM_MC_LOCAL(uint32_t, uSrc);
846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
847
848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
849 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
850 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
851 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
852
853 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
854 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
855
856 IEM_MC_ADVANCE_RIP_AND_FINISH();
857 IEM_MC_END();
858 }
859}
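/*
 * The register-form merge above, spelled out (consistent with the @optest
 * cases): dst[31:0] = Uss[31:0]; dst[127:32] = Hss[127:32] (VEX.vvvv);
 * dst[VLMAX-1:128] = 0.  The memory form instead zero extends the 32-bit load
 * over the entire destination register.
 */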
860
861
862FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
863{
864 Assert(pVCpu->iem.s.uVexLength <= 1);
865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
866 if (IEM_IS_MODRM_REG_MODE(bRm))
867 {
868 /**
869 * @opcode 0x10
870 * @oppfx 0xf2
871 * @opcodesub 11 mr/reg
872 * @opcpuid avx
873 * @opgroup og_avx_simdfp_datamerge
874 * @opxcpttype 5
875 * @optest op1=1 op2=0 op3=2 -> op1=2
876 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
877 * @optest op1=3 op2=-1 op3=0x77 ->
878 * op1=0xffffffffffffffff0000000000000077
879 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
880 */
881 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
882 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
883 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
884
885 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
886 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
887 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
888 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
889 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
890 IEM_MC_ADVANCE_RIP_AND_FINISH();
891 IEM_MC_END();
892 }
893 else
894 {
895 /**
896 * @opdone
897 * @opcode 0x10
898 * @oppfx 0xf2
899 * @opcodesub !11 mr/reg
900 * @opcpuid avx
901 * @opgroup og_avx_simdfp_datamove
902 * @opxcpttype 5
903 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
904 * @optest op1=1 op2=2 -> op1=2
905 * @optest op1=0 op2=-22 -> op1=-22
906 */
907 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
908 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
909 IEM_MC_LOCAL(uint64_t, uSrc);
910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
911
912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
913 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
914 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
915 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
916
917 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
918 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
919
920 IEM_MC_ADVANCE_RIP_AND_FINISH();
921 IEM_MC_END();
922 }
923}
924
925
926/**
927 * @opcode 0x11
928 * @oppfx none
929 * @opcpuid avx
930 * @opgroup og_avx_simdfp_datamove
931 * @opxcpttype 4UA
932 * @optest op1=1 op2=2 -> op1=2
933 * @optest op1=0 op2=-22 -> op1=-22
934 */
935FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
936{
937 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
938 Assert(pVCpu->iem.s.uVexLength <= 1);
939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
940 if (IEM_IS_MODRM_REG_MODE(bRm))
941 {
942 /*
943 * Register, register.
944 */
945 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
948 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
949 if (pVCpu->iem.s.uVexLength == 0)
950 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
951 IEM_GET_MODRM_REG(pVCpu, bRm));
952 else
953 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
954 IEM_GET_MODRM_REG(pVCpu, bRm));
955 IEM_MC_ADVANCE_RIP_AND_FINISH();
956 IEM_MC_END();
957 }
958 else if (pVCpu->iem.s.uVexLength == 0)
959 {
960 /*
961 * 128-bit: Memory, register.
962 */
963 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
964 IEM_MC_LOCAL(RTUINT128U, uSrc);
965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
966
967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
968 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
969 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
970 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
971
972 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
973 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
974
975 IEM_MC_ADVANCE_RIP_AND_FINISH();
976 IEM_MC_END();
977 }
978 else
979 {
980 /*
981 * 256-bit: Memory, register.
982 */
983 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
984 IEM_MC_LOCAL(RTUINT256U, uSrc);
985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
986
987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
988 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
989 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
990 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
991
992 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
993 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
994
995 IEM_MC_ADVANCE_RIP_AND_FINISH();
996 IEM_MC_END();
997 }
998}
999
1000
1001/**
1002 * @opcode 0x11
1003 * @oppfx 0x66
1004 * @opcpuid avx
1005 * @opgroup og_avx_simdfp_datamove
1006 * @opxcpttype 4UA
1007 * @optest op1=1 op2=2 -> op1=2
1008 * @optest op1=0 op2=-22 -> op1=-22
1009 */
1010FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
1011{
1012 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1013 Assert(pVCpu->iem.s.uVexLength <= 1);
1014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1015 if (IEM_IS_MODRM_REG_MODE(bRm))
1016 {
1017 /*
1018 * Register, register.
1019 */
1020 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1021 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1022 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1023 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1024 if (pVCpu->iem.s.uVexLength == 0)
1025 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1026 IEM_GET_MODRM_REG(pVCpu, bRm));
1027 else
1028 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1029 IEM_GET_MODRM_REG(pVCpu, bRm));
1030 IEM_MC_ADVANCE_RIP_AND_FINISH();
1031 IEM_MC_END();
1032 }
1033 else if (pVCpu->iem.s.uVexLength == 0)
1034 {
1035 /*
1036 * 128-bit: Memory, register.
1037 */
1038 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1039 IEM_MC_LOCAL(RTUINT128U, uSrc);
1040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1041
1042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1043 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1044 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1045 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1046
1047 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
1048 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1049
1050 IEM_MC_ADVANCE_RIP_AND_FINISH();
1051 IEM_MC_END();
1052 }
1053 else
1054 {
1055 /*
1056 * 256-bit: Memory, register.
1057 */
1058 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1059 IEM_MC_LOCAL(RTUINT256U, uSrc);
1060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1061
1062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1063 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1066
1067 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1068 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1069
1070 IEM_MC_ADVANCE_RIP_AND_FINISH();
1071 IEM_MC_END();
1072 }
1073}
1074
1075
1076FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
1077{
1078 Assert(pVCpu->iem.s.uVexLength <= 1);
1079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1080 if (IEM_IS_MODRM_REG_MODE(bRm))
1081 {
1082 /**
1083 * @opcode 0x11
1084 * @oppfx 0xf3
1085 * @opcodesub 11 mr/reg
1086 * @opcpuid avx
1087 * @opgroup og_avx_simdfp_datamerge
1088 * @opxcpttype 5
1089 * @optest op1=1 op2=0 op3=2 -> op1=2
1090 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
1091 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
1092 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
1093 */
1094 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1095 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1096 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1097
1098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1100 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
1101 IEM_GET_MODRM_REG(pVCpu, bRm),
1102 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
1103 IEM_MC_ADVANCE_RIP_AND_FINISH();
1104 IEM_MC_END();
1105 }
1106 else
1107 {
1108 /**
1109 * @opdone
1110 * @opcode 0x11
1111 * @oppfx 0xf3
1112 * @opcodesub !11 mr/reg
1113 * @opcpuid avx
1114 * @opgroup og_avx_simdfp_datamove
1115 * @opxcpttype 5
1116 * @opfunction iemOp_vmovss_Wss_Hss_Vss
1117 * @optest op1=1 op2=2 -> op1=2
1118 * @optest op1=0 op2=-22 -> op1=-22
1119 */
1120 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1121 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1122 IEM_MC_LOCAL(uint32_t, uSrc);
1123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1124
1125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1126 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1128 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1129
1130 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1131 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1132
1133 IEM_MC_ADVANCE_RIP_AND_FINISH();
1134 IEM_MC_END();
1135 }
1136}
1137
1138
1139FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
1140{
1141 Assert(pVCpu->iem.s.uVexLength <= 1);
1142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1143 if (IEM_IS_MODRM_REG_MODE(bRm))
1144 {
1145 /**
1146 * @opcode 0x11
1147 * @oppfx 0xf2
1148 * @opcodesub 11 mr/reg
1149 * @opcpuid avx
1150 * @opgroup og_avx_simdfp_datamerge
1151 * @opxcpttype 5
1152 * @optest op1=1 op2=0 op3=2 -> op1=2
1153 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
1154 * @optest op1=3 op2=-1 op3=0x77 ->
1155 * op1=0xffffffffffffffff0000000000000077
1156 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
1157 */
1158 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1159 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1160 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1161
1162 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1163 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1164 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1165 IEM_GET_MODRM_REG(pVCpu, bRm),
1166 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
1167 IEM_MC_ADVANCE_RIP_AND_FINISH();
1168 IEM_MC_END();
1169 }
1170 else
1171 {
1172 /**
1173 * @opdone
1174 * @opcode 0x11
1175 * @oppfx 0xf2
1176 * @opcodesub !11 mr/reg
1177 * @opcpuid avx
1178 * @opgroup og_avx_simdfp_datamove
1179 * @opxcpttype 5
1180 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
1181 * @optest op1=1 op2=2 -> op1=2
1182 * @optest op1=0 op2=-22 -> op1=-22
1183 */
1184 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1185 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1186 IEM_MC_LOCAL(uint64_t, uSrc);
1187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1188
1189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1193
1194 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1195 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1196
1197 IEM_MC_ADVANCE_RIP_AND_FINISH();
1198 IEM_MC_END();
1199 }
1200}
1201
1202
1203FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
1204{
1205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1206 if (IEM_IS_MODRM_REG_MODE(bRm))
1207 {
1208 /**
1209 * @opcode 0x12
1210 * @opcodesub 11 mr/reg
1211 * @oppfx none
1212 * @opcpuid avx
1213 * @opgroup og_avx_simdfp_datamerge
1214 * @opxcpttype 7LZ
1215 * @optest op2=0x2200220122022203
1216 * op3=0x3304330533063307
1217 * -> op1=0x22002201220222033304330533063307
1218 * @optest op2=-1 op3=-42 -> op1=-42
1219 * @note op2 and op3 are only the high 8-byte halves of the XMM registers.
1220 */
1221 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1222 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1223 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1224
1225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1226 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1227 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1228 IEM_GET_MODRM_RM(pVCpu, bRm),
1229 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1230
1231 IEM_MC_ADVANCE_RIP_AND_FINISH();
1232 IEM_MC_END();
1233 }
1234 else
1235 {
1236 /**
1237 * @opdone
1238 * @opcode 0x12
1239 * @opcodesub !11 mr/reg
1240 * @oppfx none
1241 * @opcpuid avx
1242 * @opgroup og_avx_simdfp_datamove
1243 * @opxcpttype 5LZ
1244 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1245 * @optest op1=1 op2=0 op3=0 -> op1=0
1246 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1247 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1248 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1249 */
1250 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1251
1252 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1253 IEM_MC_LOCAL(uint64_t, uSrc);
1254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1255
1256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1257 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1258 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1259 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1260
1261 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1262 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1263 uSrc,
1264 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1265
1266 IEM_MC_ADVANCE_RIP_AND_FINISH();
1267 IEM_MC_END();
1268 }
1269}
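/*
 * Spelled out (consistent with the @optest cases above): the register form
 * (vmovhlps) yields dst[63:0] = Uq[127:64], dst[127:64] = Hq[127:64]; the
 * memory form (vmovlps) yields dst[63:0] = mem64, dst[127:64] = Hq[127:64].
 * Both zero the destination bits above 127.
 */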
1270
1271
1272/**
1273 * @opcode 0x12
1274 * @opcodesub !11 mr/reg
1275 * @oppfx 0x66
1276 * @opcpuid avx
1277 * @opgroup og_avx_pcksclr_datamerge
1278 * @opxcpttype 5LZ
1279 * @optest op2=0 op3=2 -> op1=2
1280 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1281 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1282 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1283 */
1284FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1285{
1286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1287 if (IEM_IS_MODRM_MEM_MODE(bRm))
1288 {
1289 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1290
1291 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1292 IEM_MC_LOCAL(uint64_t, uSrc);
1293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1294
1295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1296 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1297 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1298 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1299
1300 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1301 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1302 uSrc,
1303 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1304
1305 IEM_MC_ADVANCE_RIP_AND_FINISH();
1306 IEM_MC_END();
1307 }
1308
1309 /**
1310 * @opdone
1311 * @opmnemonic udvex660f12m3
1312 * @opcode 0x12
1313 * @opcodesub 11 mr/reg
1314 * @oppfx 0x66
1315 * @opunused immediate
1316 * @opcpuid avx
1317 * @optest ->
1318 */
1319 else
1320 IEMOP_RAISE_INVALID_OPCODE_RET();
1321}
1322
1323
1324/**
1325 * @opcode 0x12
1326 * @oppfx 0xf3
1327 * @opcpuid avx
1328 * @opgroup og_avx_pcksclr_datamove
1329 * @opxcpttype 4
1330 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1331 * -> op1=0x00000002000000020000000100000001
1332 * @optest vex.l==1 /
1333 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1334 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1335 */
1336FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1337{
1338 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1339 Assert(pVCpu->iem.s.uVexLength <= 1);
1340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1341 if (IEM_IS_MODRM_REG_MODE(bRm))
1342 {
1343 /*
1344 * Register, register.
1345 */
1346 if (pVCpu->iem.s.uVexLength == 0)
1347 {
1348 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1349 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1350 IEM_MC_LOCAL(RTUINT128U, uSrc);
1351
1352 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1353 IEM_MC_PREPARE_AVX_USAGE();
1354
1355 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1356 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1357 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1358 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1359 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1360 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1361
1362 IEM_MC_ADVANCE_RIP_AND_FINISH();
1363 IEM_MC_END();
1364 }
1365 else
1366 {
1367 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1368 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1369 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1370 IEM_MC_PREPARE_AVX_USAGE();
1371
1372 IEM_MC_LOCAL(RTUINT256U, uSrc);
1373 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1374 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1375 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1376 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1377 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1378 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1379 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1380 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1381 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1382 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1383
1384 IEM_MC_ADVANCE_RIP_AND_FINISH();
1385 IEM_MC_END();
1386 }
1387 }
1388 else
1389 {
1390 /*
1391 * Register, memory.
1392 */
1393 if (pVCpu->iem.s.uVexLength == 0)
1394 {
1395 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1396 IEM_MC_LOCAL(RTUINT128U, uSrc);
1397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1398
1399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1400 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1401 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1402 IEM_MC_PREPARE_AVX_USAGE();
1403
1404 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1405 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1406 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1407 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1408 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1409 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1410
1411 IEM_MC_ADVANCE_RIP_AND_FINISH();
1412 IEM_MC_END();
1413 }
1414 else
1415 {
1416 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1419 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1420 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1421 IEM_MC_PREPARE_AVX_USAGE();
1422
1423 IEM_MC_LOCAL(RTUINT256U, uSrc);
1424 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1425
1426 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1427 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1428 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1429 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1430 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1431 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1432 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1433 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1434 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1435
1436 IEM_MC_ADVANCE_RIP_AND_FINISH();
1437 IEM_MC_END();
1438 }
1439 }
1440}
1441
1442
1443/**
1444 * @opcode 0x12
1445 * @oppfx 0xf2
1446 * @opcpuid avx
1447 * @opgroup og_avx_pcksclr_datamove
1448 * @opxcpttype 5
1449 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1450 * -> op1=0x22222222111111112222222211111111
1451 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1452 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1453 */
1454FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1455{
1456 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1458 if (IEM_IS_MODRM_REG_MODE(bRm))
1459 {
1460 /*
1461 * Register, register.
1462 */
1463 if (pVCpu->iem.s.uVexLength == 0)
1464 {
1465 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1466 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1467 IEM_MC_LOCAL(uint64_t, uSrc);
1468
1469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1470 IEM_MC_PREPARE_AVX_USAGE();
1471
1472 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1473 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1474 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1475 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1476
1477 IEM_MC_ADVANCE_RIP_AND_FINISH();
1478 IEM_MC_END();
1479 }
1480 else
1481 {
1482 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1483 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1484 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1485 IEM_MC_PREPARE_AVX_USAGE();
1486
1487 IEM_MC_LOCAL(uint64_t, uSrc1);
1488 IEM_MC_LOCAL(uint64_t, uSrc2);
1489 IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1490 IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);
1491
1492 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
1493 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
1494 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
1495 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
1496 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1497
1498 IEM_MC_ADVANCE_RIP_AND_FINISH();
1499 IEM_MC_END();
1500 }
1501 }
1502 else
1503 {
1504 /*
1505 * Register, memory.
1506 */
1507 if (pVCpu->iem.s.uVexLength == 0)
1508 {
1509 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1511 IEM_MC_LOCAL(uint64_t, uSrc);
1512
1513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1514 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1515 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1516 IEM_MC_PREPARE_AVX_USAGE();
1517
1518 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1519 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1520 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1521 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1522
1523 IEM_MC_ADVANCE_RIP_AND_FINISH();
1524 IEM_MC_END();
1525 }
1526 else
1527 {
1528 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1530
1531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1532 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1533 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1534 IEM_MC_PREPARE_AVX_USAGE();
1535
1536 IEM_MC_LOCAL(RTUINT256U, uSrc);
1537 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1538
1539 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1540 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1541 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1542 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1543 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1544
1545 IEM_MC_ADVANCE_RIP_AND_FINISH();
1546 IEM_MC_END();
1547 }
1548 }
1549}
1550
1551
1552/**
1553 * @opcode 0x13
1554 * @opcodesub !11 mr/reg
1555 * @oppfx none
1556 * @opcpuid avx
1557 * @opgroup og_avx_simdfp_datamove
1558 * @opxcpttype 5
1559 * @optest op1=1 op2=2 -> op1=2
1560 * @optest op1=0 op2=-42 -> op1=-42
1561 */
1562FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1563{
1564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1565 if (IEM_IS_MODRM_MEM_MODE(bRm))
1566 {
1567 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1568
1569 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1570 IEM_MC_LOCAL(uint64_t, uSrc);
1571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1572
1573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1574 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1575 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1576 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1577
1578 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1579 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1580
1581 IEM_MC_ADVANCE_RIP_AND_FINISH();
1582 IEM_MC_END();
1583 }
1584
1585 /**
1586 * @opdone
1587 * @opmnemonic udvex0f13m3
1588 * @opcode 0x13
1589 * @opcodesub 11 mr/reg
1590 * @oppfx none
1591 * @opunused immediate
1592 * @opcpuid avx
1593 * @optest ->
1594 */
1595 else
1596 IEMOP_RAISE_INVALID_OPCODE_RET();
1597}
1598
1599
1600/**
1601 * @opcode 0x13
1602 * @opcodesub !11 mr/reg
1603 * @oppfx 0x66
1604 * @opcpuid avx
1605 * @opgroup og_avx_pcksclr_datamove
1606 * @opxcpttype 5
1607 * @optest op1=1 op2=2 -> op1=2
1608 * @optest op1=0 op2=-42 -> op1=-42
1609 */
1610FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1611{
1612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1613 if (IEM_IS_MODRM_MEM_MODE(bRm))
1614 {
1615 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1617 IEM_MC_LOCAL(uint64_t, uSrc);
1618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1619
1620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1621 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1624
1625 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1626 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1627
1628 IEM_MC_ADVANCE_RIP_AND_FINISH();
1629 IEM_MC_END();
1630 }
1631
1632 /**
1633 * @opdone
1634 * @opmnemonic udvex660f13m3
1635 * @opcode 0x13
1636 * @opcodesub 11 mr/reg
1637 * @oppfx 0x66
1638 * @opunused immediate
1639 * @opcpuid avx
1640 * @optest ->
1641 */
1642 else
1643 IEMOP_RAISE_INVALID_OPCODE_RET();
1644}
1645
1646/* Opcode VEX.F3.0F 0x13 - invalid */
1647/* Opcode VEX.F2.0F 0x13 - invalid */
1648
1649/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
1650FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1651{
1652 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1653 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1654 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1655}
1656
1657
1658/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1659FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1660{
1661 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1662 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1663 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1664}
1665
1666
1667/* Opcode VEX.F3.0F 0x14 - invalid */
1668/* Opcode VEX.F2.0F 0x14 - invalid */
1669
1670
1671/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1672FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1673{
1674 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1675 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1676 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1677}
1678
1679
1680/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1681FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1682{
1683 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1684 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1685 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1686}
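/*
 * Informal sketch of the 128-bit lane operation shared by the four
 * vunpck[lh]p[sd] forms above (annotation, not the actual worker code):
 *      vunpcklps: dst = { s1[0], s2[0], s1[1], s2[1] }   ; dword elements
 *      vunpckhps: dst = { s1[2], s2[2], s1[3], s2[3] }
 *      vunpcklpd: dst = { s1[0], s2[0] }                 ; qword elements
 *      vunpckhpd: dst = { s1[1], s2[1] }
 * With VEX.256 the same pattern is applied to each 128-bit lane
 * independently.
 */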
1687
1688
1689/* Opcode VEX.F3.0F 0x15 - invalid */
1690/* Opcode VEX.F2.0F 0x15 - invalid */
1691
1692
1693FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1694{
1695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1696 if (IEM_IS_MODRM_REG_MODE(bRm))
1697 {
1698 /**
1699 * @opcode 0x16
1700 * @opcodesub 11 mr/reg
1701 * @oppfx none
1702 * @opcpuid avx
1703 * @opgroup og_avx_simdfp_datamerge
1704 * @opxcpttype 7LZ
1705 */
1706 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1707
1708 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1709 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1710
1711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1712 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1713 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1714 IEM_GET_MODRM_RM(pVCpu, bRm),
1715 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1716
1717 IEM_MC_ADVANCE_RIP_AND_FINISH();
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 /**
1723 * @opdone
1724 * @opcode 0x16
1725 * @opcodesub !11 mr/reg
1726 * @oppfx none
1727 * @opcpuid avx
1728 * @opgroup og_avx_simdfp_datamove
1729 * @opxcpttype 5LZ
1730 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1731 */
1732 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1733
1734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1735 IEM_MC_LOCAL(uint64_t, uSrc);
1736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1737
1738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1739 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1741 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1742
1743 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1744 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1745 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1746 uSrc);
1747
1748 IEM_MC_ADVANCE_RIP_AND_FINISH();
1749 IEM_MC_END();
1750 }
1751}
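/*
 * Informal sketch of the two encodings handled above (annotation,
 * illustration only):
 *      vmovlhps xmm0, xmm1, xmm2  ; xmm0[63:0]=xmm1[63:0], xmm0[127:64]=xmm2[63:0]
 *      vmovhps  xmm0, xmm1, m64   ; xmm0[63:0]=xmm1[63:0], xmm0[127:64]=m64
 * In both cases bits 255:128 of the destination register are zeroed (VLMAX).
 */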
1752
1753
1754/**
1755 * @opcode 0x16
1756 * @opcodesub !11 mr/reg
1757 * @oppfx 0x66
1758 * @opcpuid avx
1759 * @opgroup og_avx_pcksclr_datamerge
1760 * @opxcpttype 5LZ
1761 */
1762FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1763{
1764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1765 if (IEM_IS_MODRM_MEM_MODE(bRm))
1766 {
1767 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1768
1769 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1770 IEM_MC_LOCAL(uint64_t, uSrc);
1771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1772
1773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1774 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1775 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1776 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1777
1778 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1779 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1780 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1781 uSrc);
1782
1783 IEM_MC_ADVANCE_RIP_AND_FINISH();
1784 IEM_MC_END();
1785 }
1786
1787 /**
1788 * @opdone
1789 * @opmnemonic udvex660f16m3
1790 * @opcode 0x16
1791 * @opcodesub 11 mr/reg
1792 * @oppfx 0x66
1793 * @opunused immediate
1794 * @opcpuid avx
1795 * @optest ->
1796 */
1797 else
1798 IEMOP_RAISE_INVALID_OPCODE_RET();
1799}
1800
1801
1802/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1803/**
1804 * @opcode 0x16
1805 * @oppfx 0xf3
1806 * @opcpuid avx
1807 * @opgroup og_avx_pcksclr_datamove
1808 * @opxcpttype 4
1809 */
1810FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1811{
1812 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1813 Assert(pVCpu->iem.s.uVexLength <= 1);
1814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1815 if (IEM_IS_MODRM_REG_MODE(bRm))
1816 {
1817 /*
1818 * Register, register.
1819 */
1820 if (pVCpu->iem.s.uVexLength == 0)
1821 {
1822 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1823 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1824 IEM_MC_LOCAL(RTUINT128U, uSrc);
1825
1826 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1827 IEM_MC_PREPARE_AVX_USAGE();
1828
1829 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1830 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1831 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1832 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1833 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1834 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1835
1836 IEM_MC_ADVANCE_RIP_AND_FINISH();
1837 IEM_MC_END();
1838 }
1839 else
1840 {
1841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1842 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1844 IEM_MC_PREPARE_AVX_USAGE();
1845
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1848 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1849 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1850 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1851 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1852 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1853 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1854 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1855 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1856 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1857
1858 IEM_MC_ADVANCE_RIP_AND_FINISH();
1859 IEM_MC_END();
1860 }
1861 }
1862 else
1863 {
1864 /*
1865 * Register, memory.
1866 */
1867 if (pVCpu->iem.s.uVexLength == 0)
1868 {
1869 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1870 IEM_MC_LOCAL(RTUINT128U, uSrc);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872
1873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1874 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1875 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1876 IEM_MC_PREPARE_AVX_USAGE();
1877
1878 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1879 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1880 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1881 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1882 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1883 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1884
1885 IEM_MC_ADVANCE_RIP_AND_FINISH();
1886 IEM_MC_END();
1887 }
1888 else
1889 {
1890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1893 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1895 IEM_MC_PREPARE_AVX_USAGE();
1896
1897 IEM_MC_LOCAL(RTUINT256U, uSrc);
1898 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1899
1900 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1901 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1902 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1903 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1904 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1905 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1906 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1907 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1908 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1909
1910 IEM_MC_ADVANCE_RIP_AND_FINISH();
1911 IEM_MC_END();
1912 }
1913 }
1914}
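/*
 * Informal reference loop for the vmovshdup element pattern implemented by
 * the stores above (annotation, illustration only; cDWords is 4, or 8 for
 * the VEX.256 form):
 *      for (unsigned i = 0; i < cDWords; i += 2)
 *          auDst[i] = auDst[i + 1] = auSrc[i + 1];
 */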
1915
1916
1917/* Opcode VEX.F2.0F 0x16 - invalid */
1918
1919
1920/**
1921 * @opcode 0x17
1922 * @opcodesub !11 mr/reg
1923 * @oppfx none
1924 * @opcpuid avx
1925 * @opgroup og_avx_simdfp_datamove
1926 * @opxcpttype 5
1927 */
1928FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1929{
1930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1931 if (IEM_IS_MODRM_MEM_MODE(bRm))
1932 {
1933 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1934
1935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1936 IEM_MC_LOCAL(uint64_t, uSrc);
1937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1938
1939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1940 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1941 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1942 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1943
1944 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1945 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1946
1947 IEM_MC_ADVANCE_RIP_AND_FINISH();
1948 IEM_MC_END();
1949 }
1950
1951 /**
1952 * @opdone
1953 * @opmnemonic udvex0f17m3
1954 * @opcode 0x17
1955 * @opcodesub 11 mr/reg
1956 * @oppfx none
1957 * @opunused immediate
1958 * @opcpuid avx
1959 * @optest ->
1960 */
1961 else
1962 IEMOP_RAISE_INVALID_OPCODE_RET();
1963}
1964
1965
1966/**
1967 * @opcode 0x17
1968 * @opcodesub !11 mr/reg
1969 * @oppfx 0x66
1970 * @opcpuid avx
1971 * @opgroup og_avx_pcksclr_datamove
1972 * @opxcpttype 5
1973 */
1974FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1975{
1976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1977 if (IEM_IS_MODRM_MEM_MODE(bRm))
1978 {
1979 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1980
1981 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1982 IEM_MC_LOCAL(uint64_t, uSrc);
1983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1984
1985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1986 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1988 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1989
1990 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1991 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1992
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 }
1996
1997 /**
1998 * @opdone
1999 * @opmnemonic udvex660f17m3
2000 * @opcode 0x17
2001 * @opcodesub 11 mr/reg
2002 * @oppfx 0x66
2003 * @opunused immediate
2004 * @opcpuid avx
2005 * @optest ->
2006 */
2007 else
2008 IEMOP_RAISE_INVALID_OPCODE_RET();
2009}
2010
2011
2012/* Opcode VEX.F3.0F 0x17 - invalid */
2013/* Opcode VEX.F2.0F 0x17 - invalid */
2014
2015
2016/* Opcode VEX.0F 0x18 - invalid */
2017/* Opcode VEX.0F 0x19 - invalid */
2018/* Opcode VEX.0F 0x1a - invalid */
2019/* Opcode VEX.0F 0x1b - invalid */
2020/* Opcode VEX.0F 0x1c - invalid */
2021/* Opcode VEX.0F 0x1d - invalid */
2022/* Opcode VEX.0F 0x1e - invalid */
2023/* Opcode VEX.0F 0x1f - invalid */
2024
2025/* Opcode VEX.0F 0x20 - invalid */
2026/* Opcode VEX.0F 0x21 - invalid */
2027/* Opcode VEX.0F 0x22 - invalid */
2028/* Opcode VEX.0F 0x23 - invalid */
2029/* Opcode VEX.0F 0x24 - invalid */
2030/* Opcode VEX.0F 0x25 - invalid */
2031/* Opcode VEX.0F 0x26 - invalid */
2032/* Opcode VEX.0F 0x27 - invalid */
2033
2034/**
2035 * @opcode 0x28
2036 * @oppfx none
2037 * @opcpuid avx
2038 * @opgroup og_avx_pcksclr_datamove
2039 * @opxcpttype 1
2040 * @optest op1=1 op2=2 -> op1=2
2041 * @optest op1=0 op2=-42 -> op1=-42
2042 * @note Almost identical to vmovapd.
2043 */
2044FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2045{
2046 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2048 Assert(pVCpu->iem.s.uVexLength <= 1);
2049 if (IEM_IS_MODRM_REG_MODE(bRm))
2050 {
2051 /*
2052 * Register, register.
2053 */
2054 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2055 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2056
2057 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2058 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2059 if (pVCpu->iem.s.uVexLength == 0)
2060 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2061 IEM_GET_MODRM_RM(pVCpu, bRm));
2062 else
2063 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2064 IEM_GET_MODRM_RM(pVCpu, bRm));
2065 IEM_MC_ADVANCE_RIP_AND_FINISH();
2066 IEM_MC_END();
2067 }
2068 else
2069 {
2070 /*
2071 * Register, memory.
2072 */
2073 if (pVCpu->iem.s.uVexLength == 0)
2074 {
2075 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_LOCAL(RTUINT128U, uSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2083
2084 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090 else
2091 {
2092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2094 IEM_MC_LOCAL(RTUINT256U, uSrc);
2095
2096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2097 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2100
2101 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2102 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2103
2104 IEM_MC_ADVANCE_RIP_AND_FINISH();
2105 IEM_MC_END();
2106 }
2107 }
2108}
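/*
 * Note (annotation): the _ALIGN_SSE/_ALIGN_AVX memory access variants used
 * by vmovaps above and the vmovapd/vmovaps forms that follow enforce the
 * 16/32 byte operand alignment these instructions require, raising \#GP(0)
 * on a misaligned operand; the unaligned vmovups/vmovupd encodings use the
 * plain access variants instead.
 */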
2109
2110
2111/**
2112 * @opcode 0x28
2113 * @oppfx 0x66
2114 * @opcpuid avx
2115 * @opgroup og_avx_pcksclr_datamove
2116 * @opxcpttype 1
2117 * @optest op1=1 op2=2 -> op1=2
2118 * @optest op1=0 op2=-42 -> op1=-42
2119 * @note Almost identical to vmovaps.
2120 */
2121FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2122{
2123 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2125 Assert(pVCpu->iem.s.uVexLength <= 1);
2126 if (IEM_IS_MODRM_REG_MODE(bRm))
2127 {
2128 /*
2129 * Register, register.
2130 */
2131 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2132 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2133
2134 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2136 if (pVCpu->iem.s.uVexLength == 0)
2137 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2138 IEM_GET_MODRM_RM(pVCpu, bRm));
2139 else
2140 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2141 IEM_GET_MODRM_RM(pVCpu, bRm));
2142 IEM_MC_ADVANCE_RIP_AND_FINISH();
2143 IEM_MC_END();
2144 }
2145 else
2146 {
2147 /*
2148 * Register, memory.
2149 */
2150 if (pVCpu->iem.s.uVexLength == 0)
2151 {
2152 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2154 IEM_MC_LOCAL(RTUINT128U, uSrc);
2155
2156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2157 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2158 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2160
2161 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2162 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2163
2164 IEM_MC_ADVANCE_RIP_AND_FINISH();
2165 IEM_MC_END();
2166 }
2167 else
2168 {
2169 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171 IEM_MC_LOCAL(RTUINT256U, uSrc);
2172
2173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2174 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2175 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2176 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2177
2178 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2179 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 }
2185}
2186
2187/**
2188 * @opmnemonic udvexf30f28
2189 * @opcode 0x28
2190 * @oppfx 0xf3
2191 * @opunused vex.modrm
2192 * @opcpuid avx
2193 * @optest ->
2194 * @opdone
2195 */
2196
2197/**
2198 * @opmnemonic udvexf20f28
2199 * @opcode 0x28
2200 * @oppfx 0xf2
2201 * @opunused vex.modrm
2202 * @opcpuid avx
2203 * @optest ->
2204 * @opdone
2205 */
2206
2207/**
2208 * @opcode 0x29
2209 * @oppfx none
2210 * @opcpuid avx
2211 * @opgroup og_avx_pcksclr_datamove
2212 * @opxcpttype 1
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 * @note Almost identical to vmovapd.
2216 */
2217FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2218{
2219 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 if (IEM_IS_MODRM_REG_MODE(bRm))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2228 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2229
2230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2232 if (pVCpu->iem.s.uVexLength == 0)
2233 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2234 IEM_GET_MODRM_REG(pVCpu, bRm));
2235 else
2236 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2237 IEM_GET_MODRM_REG(pVCpu, bRm));
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 }
2241 else
2242 {
2243 /*
2244 * Register, memory.
2245 */
2246 if (pVCpu->iem.s.uVexLength == 0)
2247 {
2248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250 IEM_MC_LOCAL(RTUINT128U, uSrc);
2251
2252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2253 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2256
2257 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2258 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2259
2260 IEM_MC_ADVANCE_RIP_AND_FINISH();
2261 IEM_MC_END();
2262 }
2263 else
2264 {
2265 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2267 IEM_MC_LOCAL(RTUINT256U, uSrc);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2270 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2271 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2273
2274 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2275 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2276
2277 IEM_MC_ADVANCE_RIP_AND_FINISH();
2278 IEM_MC_END();
2279 }
2280 }
2281}
2282
2283/**
2284 * @opcode 0x29
2285 * @oppfx 0x66
2286 * @opcpuid avx
2287 * @opgroup og_avx_pcksclr_datamove
2288 * @opxcpttype 1
2289 * @optest op1=1 op2=2 -> op1=2
2290 * @optest op1=0 op2=-42 -> op1=-42
2291 * @note Almost identical to vmovaps.
2292 */
2293FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2294{
2295 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2296 Assert(pVCpu->iem.s.uVexLength <= 1);
2297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2298 if (IEM_IS_MODRM_REG_MODE(bRm))
2299 {
2300 /*
2301 * Register, register.
2302 */
2303 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2305
2306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2307 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2308 if (pVCpu->iem.s.uVexLength == 0)
2309 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2310 IEM_GET_MODRM_REG(pVCpu, bRm));
2311 else
2312 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2313 IEM_GET_MODRM_REG(pVCpu, bRm));
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 if (pVCpu->iem.s.uVexLength == 0)
2323 {
2324 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2326 IEM_MC_LOCAL(RTUINT128U, uSrc);
2327
2328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2332
2333 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2334 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2335
2336 IEM_MC_ADVANCE_RIP_AND_FINISH();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343 IEM_MC_LOCAL(RTUINT256U, uSrc);
2344
2345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2346 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2349
2350 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2351 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2352
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 }
2356 }
2357}
2358
2359
2360/**
2361 * @opmnemonic udvexf30f29
2362 * @opcode 0x29
2363 * @oppfx 0xf3
2364 * @opunused vex.modrm
2365 * @opcpuid avx
2366 * @optest ->
2367 * @opdone
2368 */
2369
2370/**
2371 * @opmnemonic udvexf20f29
2372 * @opcode 0x29
2373 * @oppfx 0xf2
2374 * @opunused vex.modrm
2375 * @opcpuid avx
2376 * @optest ->
2377 * @opdone
2378 */
2379
2380
2381/* Opcode VEX.0F 0x2a - invalid */
2382/* Opcode VEX.66.0F 0x2a - invalid */
2383
2384
2385/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2386FNIEMOP_DEF(iemOp_vcvtsi2ss_Vss_Hss_Ey)
2387{
2388 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SS, vcvtsi2ss, Vps, Hps, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2389 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2391 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2392 {
2393 if (IEM_IS_MODRM_REG_MODE(bRm))
2394 {
2395 /* XMM, greg64 */
2396 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2397 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2398 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2399 IEM_MC_PREPARE_AVX_USAGE();
2400
2401 IEM_MC_LOCAL(X86XMMREG, uDst);
2402 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2403 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2404 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2405 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2406 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2407 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2408 puDst, puSrc1, pi64Src2);
2409 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2410 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2411 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2412 IEM_MC_ADVANCE_RIP_AND_FINISH();
2413 IEM_MC_END();
2414 }
2415 else
2416 {
2417 /* XMM, [mem64] */
2418 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2421 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2422 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2423 IEM_MC_PREPARE_AVX_USAGE();
2424
2425 IEM_MC_LOCAL(X86XMMREG, uDst);
2426 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2427 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2428 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2429 IEM_MC_LOCAL(int64_t, i64Src2);
2430 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2431 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2432 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2433 puDst, puSrc1, pi64Src2);
2434 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2435 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2436 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2437 IEM_MC_ADVANCE_RIP_AND_FINISH();
2438 IEM_MC_END();
2439 }
2440 }
2441 else
2442 {
2443 if (IEM_IS_MODRM_REG_MODE(bRm))
2444 {
2445 /* XMM, greg32 */
2446 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2447 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2448 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2449 IEM_MC_PREPARE_AVX_USAGE();
2450
2451 IEM_MC_LOCAL(X86XMMREG, uDst);
2452 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2453 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2454 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2455 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2456 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2457 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2458 puDst, puSrc1, pi32Src2);
2459 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2460 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2461 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2462 IEM_MC_ADVANCE_RIP_AND_FINISH();
2463 IEM_MC_END();
2464 }
2465 else
2466 {
2467 /* XMM, [mem32] */
2468 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2472 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2473 IEM_MC_PREPARE_AVX_USAGE();
2474
2475 IEM_MC_LOCAL(X86XMMREG, uDst);
2476 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2477 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2478 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2479 IEM_MC_LOCAL(int32_t, i32Src2);
2480 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2481 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2482 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2483 puDst, puSrc1, pi32Src2);
2484 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2485 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2486 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2487 IEM_MC_ADVANCE_RIP_AND_FINISH();
2488 IEM_MC_END();
2489 }
2490 }
2491}
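/*
 * Rough operation sketch for the vcvtsi2ss forms above (annotation; field
 * names are informal):
 *      dst.f32[0]    = (float)iSrc2;     // converted integer, MXCSR rounding
 *      dst.f32[1..3] = src1.f32[1..3];   // upper lanes copied from H (VVVV)
 *      dst[255:128]  = 0;                // VLMAX zeroing done by the store
 * vcvtsi2sd below follows the same pattern with a double in element 0 and
 * src1.f64[1] supplying the upper half.
 */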
2492
2493
2494/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2495FNIEMOP_DEF(iemOp_vcvtsi2sd_Vsd_Hsd_Ey)
2496{
2497 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SD, vcvtsi2sd, Vpd, Hpd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2498 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2501 {
2502 if (IEM_IS_MODRM_REG_MODE(bRm))
2503 {
2504 /* XMM, greg64 */
2505 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2506 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2507 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2508 IEM_MC_PREPARE_AVX_USAGE();
2509
2510 IEM_MC_LOCAL(X86XMMREG, uDst);
2511 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2512 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2513 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2514 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2515 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2516 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2517 puDst, puSrc1, pi64Src2);
2518 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2519 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2520 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2521 IEM_MC_ADVANCE_RIP_AND_FINISH();
2522 IEM_MC_END();
2523 }
2524 else
2525 {
2526 /* XMM, [mem64] */
2527 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2530 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2532 IEM_MC_PREPARE_AVX_USAGE();
2533
2534 IEM_MC_LOCAL(X86XMMREG, uDst);
2535 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2536 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2537 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2538 IEM_MC_LOCAL(int64_t, i64Src2);
2539 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2540 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2541 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2542 puDst, puSrc1, pi64Src2);
2543 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2544 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2545 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2546 IEM_MC_ADVANCE_RIP_AND_FINISH();
2547 IEM_MC_END();
2548 }
2549 }
2550 else
2551 {
2552 if (IEM_IS_MODRM_REG_MODE(bRm))
2553 {
2554 /* XMM, greg32 */
2555 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2556 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2558 IEM_MC_PREPARE_AVX_USAGE();
2559
2560 IEM_MC_LOCAL(X86XMMREG, uDst);
2561 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2562 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2563 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2564 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2565 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2566 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2567 puDst, puSrc1, pi32Src2);
2568 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2569 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2570 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2571 IEM_MC_ADVANCE_RIP_AND_FINISH();
2572 IEM_MC_END();
2573 }
2574 else
2575 {
2576 /* XMM, [mem32] */
2577 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2580 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2581 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2582 IEM_MC_PREPARE_AVX_USAGE();
2583
2584 IEM_MC_LOCAL(X86XMMREG, uDst);
2585 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2586 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2587 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2588 IEM_MC_LOCAL(int32_t, i32Src2);
2589 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2590 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2591 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2592 puDst, puSrc1, pi32Src2);
2593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2594 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2595 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2596 IEM_MC_ADVANCE_RIP_AND_FINISH();
2597 IEM_MC_END();
2598 }
2599 }
2600}
2601
2602
2603/**
2604 * @opcode 0x2b
2605 * @opcodesub !11 mr/reg
2606 * @oppfx none
2607 * @opcpuid avx
2608 * @opgroup og_avx_cachect
2609 * @opxcpttype 1
2610 * @optest op1=1 op2=2 -> op1=2
2611 * @optest op1=0 op2=-42 -> op1=-42
2612 * @note Identical implementation to vmovntpd
2613 */
2614FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2615{
2616 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2617 Assert(pVCpu->iem.s.uVexLength <= 1);
2618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2619 if (IEM_IS_MODRM_MEM_MODE(bRm))
2620 {
2621 /*
2622 * Memory, register.
2623 */
2624 if (pVCpu->iem.s.uVexLength == 0)
2625 {
2626 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2627 IEM_MC_LOCAL(RTUINT128U, uSrc);
2628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2629
2630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2631 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2632 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2633 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2634
2635 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2636 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2637
2638 IEM_MC_ADVANCE_RIP_AND_FINISH();
2639 IEM_MC_END();
2640 }
2641 else
2642 {
2643 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2644 IEM_MC_LOCAL(RTUINT256U, uSrc);
2645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2646
2647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2648 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2649 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2650 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2651
2652 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2653 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658 }
2659 /* The register, register encoding is invalid. */
2660 else
2661 IEMOP_RAISE_INVALID_OPCODE_RET();
2662}
2663
2664/**
2665 * @opcode 0x2b
2666 * @opcodesub !11 mr/reg
2667 * @oppfx 0x66
2668 * @opcpuid avx
2669 * @opgroup og_avx_cachect
2670 * @opxcpttype 1
2671 * @optest op1=1 op2=2 -> op1=2
2672 * @optest op1=0 op2=-42 -> op1=-42
2673 * @note Identical implementation to vmovntps
2674 */
2675FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2676{
2677 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2678 Assert(pVCpu->iem.s.uVexLength <= 1);
2679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2680 if (IEM_IS_MODRM_MEM_MODE(bRm))
2681 {
2682 /*
2683 * memory, register.
2684 */
2685 if (pVCpu->iem.s.uVexLength == 0)
2686 {
2687 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2688 IEM_MC_LOCAL(RTUINT128U, uSrc);
2689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2690
2691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2692 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2694 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2695
2696 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2697 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2698
2699 IEM_MC_ADVANCE_RIP_AND_FINISH();
2700 IEM_MC_END();
2701 }
2702 else
2703 {
2704 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2705 IEM_MC_LOCAL(RTUINT256U, uSrc);
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2707
2708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2709 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2710 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2712
2713 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2714 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2715
2716 IEM_MC_ADVANCE_RIP_AND_FINISH();
2717 IEM_MC_END();
2718 }
2719 }
2720 /* The register, register encoding is invalid. */
2721 else
2722 IEMOP_RAISE_INVALID_OPCODE_RET();
2723}
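/*
 * Note (annotation): vmovntps/vmovntpd are non-temporal store *hints*; the
 * caching hint has no architectural effect to emulate, so the two forms
 * above are implemented as ordinary aligned stores, with the mod=11
 * register encodings raising \#UD as architecturally required.
 */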
2724
2725/**
2726 * @opmnemonic udvexf30f2b
2727 * @opcode 0x2b
2728 * @oppfx 0xf3
2729 * @opunused vex.modrm
2730 * @opcpuid avx
2731 * @optest ->
2732 * @opdone
2733 */
2734
2735/**
2736 * @opmnemonic udvexf20f2b
2737 * @opcode 0x2b
2738 * @oppfx 0xf2
2739 * @opunused vex.modrm
2740 * @opcpuid avx
2741 * @optest ->
2742 * @opdone
2743 */
2744
2745
2746/* Opcode VEX.0F 0x2c - invalid */
2747/* Opcode VEX.66.0F 0x2c - invalid */
2748
2749#define IEMOP_VCVTXSS2SI_Gy_Wss_BODY(a_Instr) \
2750 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2752 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2753 { \
2754 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2755 { \
2756 /* greg64, XMM */ \
2757 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2758 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2759 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2760 IEM_MC_PREPARE_AVX_USAGE(); \
2761 IEM_MC_LOCAL( int64_t, i64Dst); \
2762 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2763 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2764 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2765 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2766 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2767 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2768 pi64Dst, pr32Src); \
2769 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2770 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2771 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2772 IEM_MC_END(); \
2773 } \
2774 else \
2775 { \
2776 /* greg64, [mem32] */ \
2777 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2780 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2781 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2782 IEM_MC_PREPARE_AVX_USAGE(); \
2783 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2784 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2785 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2786 IEM_MC_LOCAL( int64_t, i64Dst); \
2787 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2788 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2789 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2790 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2791 pi64Dst, pr32Src); \
2792 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2793 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2795 IEM_MC_END(); \
2796 } \
2797 } \
2798 else \
2799 { \
2800 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2801 { \
2802 /* greg32, XMM */ \
2803 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2804 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2805 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2806 IEM_MC_PREPARE_AVX_USAGE(); \
2807 IEM_MC_LOCAL( int32_t, i32Dst); \
2808 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2809 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2810 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2811 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2812 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2813 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2814 pi32Dst, pr32Src); \
2815 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2816 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2817 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2818 IEM_MC_END(); \
2819 } \
2820 else \
2821 { \
2822 /* greg32, [mem32] */ \
2823 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2826 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2827 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2828 IEM_MC_PREPARE_AVX_USAGE(); \
2829 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2830 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2831 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2832 IEM_MC_LOCAL( int32_t, i32Dst); \
2833 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2834 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2835 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2836 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2837 pi32Dst, pr32Src); \
2838 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2839 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2840 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2841 IEM_MC_END(); \
2842 } \
2843 } \
2844 (void)0
2845
2846
2847#define IEMOP_VCVTXSD2SI_Gy_Wsd_BODY(a_Instr) \
2848 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2850 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2851 { \
2852 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2853 { \
2854 /* greg64, XMM */ \
2855 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2856 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2857 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2858 IEM_MC_PREPARE_AVX_USAGE(); \
2859 IEM_MC_LOCAL( int64_t, i64Dst); \
2860 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2861 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2862 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2863 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2864 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2865 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2866 pi64Dst, pr64Src); \
2867 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2868 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2869 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2870 IEM_MC_END(); \
2871 } \
2872 else \
2873 { \
2874 /* greg64, [mem64] */ \
2875 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2876 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2878 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2879 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2880 IEM_MC_PREPARE_AVX_USAGE(); \
2881 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2882 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2883 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2884 IEM_MC_LOCAL( int64_t, i64Dst); \
2885 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2886 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2887 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2888 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2889 pi64Dst, pr64Src); \
2890 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2891 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2892 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2893 IEM_MC_END(); \
2894 } \
2895 } \
2896 else \
2897 { \
2898 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2899 { \
2900 /* greg32, XMM */ \
2901 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2902 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2903 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2904 IEM_MC_PREPARE_AVX_USAGE(); \
2905 IEM_MC_LOCAL( int32_t, i32Dst); \
2906 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2907 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2908 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2909 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2910 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2911 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2912 pi32Dst, pr64Src); \
2913 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2914 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2915 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2916 IEM_MC_END(); \
2917 } \
2918 else \
2919 { \
2920 /* greg32, [mem64] */ \
2921 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2924 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2925 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2926 IEM_MC_PREPARE_AVX_USAGE(); \
2927 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2928 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2929 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2930 IEM_MC_LOCAL( int32_t, i32Dst); \
2931 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2932 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2933 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2934 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2935 pi32Dst, pr64Src); \
2936 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2937 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2938 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2939 IEM_MC_END(); \
2940 } \
2941 } \
2942 (void)0
2943
2944
2945/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2946FNIEMOP_DEF(iemOp_vcvttss2si_Gy_Wss)
2947{
2948 IEMOP_MNEMONIC2(VEX_RM, VCVTTSS2SI, vcvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2949 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvttss2si);
2950}
2951
2952
2953/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2954FNIEMOP_DEF(iemOp_vcvttsd2si_Gy_Wsd)
2955{
2956 IEMOP_MNEMONIC2(VEX_RM, VCVTTSD2SI, vcvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2957 IEMOP_VCVTXSD2SI_Gy_Wsd_BODY( vcvttsd2si);
2958}
2959
2960
2961/* Opcode VEX.0F 0x2d - invalid */
2962/* Opcode VEX.66.0F 0x2d - invalid */
2963
2964
2965/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2966FNIEMOP_DEF(iemOp_vcvtss2si_Gy_Wss)
2967{
2968 IEMOP_MNEMONIC2(VEX_RM, VCVTSS2SI, vcvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2969 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvtss2si);
2970}
2971
2972
2973/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2974FNIEMOP_DEF(iemOp_vcvtsd2si_Gy_Wsd)
2975{
2976 IEMOP_MNEMONIC2(VEX_RM, VCVTSD2SI, vcvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2977 IEMOP_VCVTXSD2SI_Gy_Wsd_BODY( vcvtsd2si);
2978}
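/*
 * Note (annotation): the 'tt' forms (vcvttss2si/vcvttsd2si, opcode 0x2c)
 * truncate toward zero, while vcvtss2si/vcvtsd2si (opcode 0x2d) round
 * according to MXCSR.RC; all four share the macro bodies above and differ
 * only in the assembly/fallback worker invoked.
 */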
2979
2980
2981
2982/**
2983 * @opcode 0x2e
2984 * @oppfx none
2985 * @opflmodify cf,pf,af,zf,sf,of
2986 * @opflclear af,sf,of
2987 */
2988FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2989{
2990 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2992 if (IEM_IS_MODRM_REG_MODE(bRm))
2993 {
2994 /*
2995 * Register, register.
2996 */
2997 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2998 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2999 IEM_MC_LOCAL(uint32_t, fEFlags);
3000 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3001 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3002 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3003 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3004 IEM_MC_PREPARE_AVX_USAGE();
3005 IEM_MC_FETCH_EFLAGS(fEFlags);
3006 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3007 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
3008 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
3009 pEFlags, uSrc1, uSrc2);
3010 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3011 IEM_MC_COMMIT_EFLAGS(fEFlags);
3012
3013 IEM_MC_ADVANCE_RIP_AND_FINISH();
3014 IEM_MC_END();
3015 }
3016 else
3017 {
3018 /*
3019 * Register, memory.
3020 */
3021 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3022 IEM_MC_LOCAL(uint32_t, fEFlags);
3023 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3024 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3025 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3027
3028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3029 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3031 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3032
3033 IEM_MC_PREPARE_AVX_USAGE();
3034 IEM_MC_FETCH_EFLAGS(fEFlags);
3035 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3036 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
3037 pEFlags, uSrc1, uSrc2);
3038 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3039 IEM_MC_COMMIT_EFLAGS(fEFlags);
3040
3041 IEM_MC_ADVANCE_RIP_AND_FINISH();
3042 IEM_MC_END();
3043 }
3044}
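/*
 * Informal result mapping for the (v)ucomiss/(v)comiss family (annotation,
 * per the SDM; AF/SF/OF are always cleared):
 *      unordered:  ZF=1 PF=1 CF=1
 *      greater:    ZF=0 PF=0 CF=0
 *      less:       ZF=0 PF=0 CF=1
 *      equal:      ZF=1 PF=0 CF=0
 * vcomiss/vcomisd (opcode 0x2f) differ from the unordered variants only in
 * signalling \#IA on QNaN operands as well as SNaN ones.
 */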
3045
3046
3047/**
3048 * @opcode 0x2e
3049 * @oppfx 0x66
3050 * @opflmodify cf,pf,af,zf,sf,of
3051 * @opflclear af,sf,of
3052 */
3053FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
3054{
3055 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3057 if (IEM_IS_MODRM_REG_MODE(bRm))
3058 {
3059 /*
3060 * Register, register.
3061 */
3062 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3063 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3064 IEM_MC_LOCAL(uint32_t, fEFlags);
3065 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3066 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3067 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3068 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3069 IEM_MC_PREPARE_AVX_USAGE();
3070 IEM_MC_FETCH_EFLAGS(fEFlags);
3071 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3072 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3073 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3074 pEFlags, uSrc1, uSrc2);
3075 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3076 IEM_MC_COMMIT_EFLAGS(fEFlags);
3077
3078 IEM_MC_ADVANCE_RIP_AND_FINISH();
3079 IEM_MC_END();
3080 }
3081 else
3082 {
3083 /*
3084 * Register, memory.
3085 */
3086 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3087 IEM_MC_LOCAL(uint32_t, fEFlags);
3088 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3089 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3090 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3092
3093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3094 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3095 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3096 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3097
3098 IEM_MC_PREPARE_AVX_USAGE();
3099 IEM_MC_FETCH_EFLAGS(fEFlags);
3100 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3101 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3102 pEFlags, uSrc1, uSrc2);
3103 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3104 IEM_MC_COMMIT_EFLAGS(fEFlags);
3105
3106 IEM_MC_ADVANCE_RIP_AND_FINISH();
3107 IEM_MC_END();
3108 }
3109}
3110
3111
3112/* Opcode VEX.F3.0F 0x2e - invalid */
3113/* Opcode VEX.F2.0F 0x2e - invalid */
3114
3115/**
3116 * @opcode 0x2f
3117 * @oppfx none
3118 * @opflmodify cf,pf,af,zf,sf,of
3119 * @opflclear af,sf,of
3120 */
3121FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
3122{
3123 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if (IEM_IS_MODRM_REG_MODE(bRm))
3126 {
3127 /*
3128 * Register, register.
3129 */
3130 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3131 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3132 IEM_MC_LOCAL(uint32_t, fEFlags);
3133 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3134 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3135 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3136 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3137 IEM_MC_PREPARE_AVX_USAGE();
3138 IEM_MC_FETCH_EFLAGS(fEFlags);
3139 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3140 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
3141 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3142 pEFlags, uSrc1, uSrc2);
3143 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3144 IEM_MC_COMMIT_EFLAGS(fEFlags);
3145
3146 IEM_MC_ADVANCE_RIP_AND_FINISH();
3147 IEM_MC_END();
3148 }
3149 else
3150 {
3151 /*
3152 * Register, memory.
3153 */
3154 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3155 IEM_MC_LOCAL(uint32_t, fEFlags);
3156 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3157 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3158 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3160
3161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3162 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3163 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3164 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3165
3166 IEM_MC_PREPARE_AVX_USAGE();
3167 IEM_MC_FETCH_EFLAGS(fEFlags);
3168 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3169 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3170 pEFlags, uSrc1, uSrc2);
3171 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3172 IEM_MC_COMMIT_EFLAGS(fEFlags);
3173
3174 IEM_MC_ADVANCE_RIP_AND_FINISH();
3175 IEM_MC_END();
3176 }
3177}
3178
3179
3180/**
3181 * @opcode 0x2f
3182 * @oppfx 0x66
3183 * @opflmodify cf,pf,af,zf,sf,of
3184 * @opflclear af,sf,of
3185 */
3186FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
3187{
3188 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3190 if (IEM_IS_MODRM_REG_MODE(bRm))
3191 {
3192 /*
3193 * Register, register.
3194 */
3195 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3196 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3197 IEM_MC_LOCAL(uint32_t, fEFlags);
3198 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3199 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3200 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3201 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3202 IEM_MC_PREPARE_AVX_USAGE();
3203 IEM_MC_FETCH_EFLAGS(fEFlags);
3204 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3205 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3206 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3207 pEFlags, uSrc1, uSrc2);
3208 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3209 IEM_MC_COMMIT_EFLAGS(fEFlags);
3210
3211 IEM_MC_ADVANCE_RIP_AND_FINISH();
3212 IEM_MC_END();
3213 }
3214 else
3215 {
3216 /*
3217 * Register, memory.
3218 */
3219 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3220 IEM_MC_LOCAL(uint32_t, fEFlags);
3221 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3222 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3223 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3225
3226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3227 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3228 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3229 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3230
3231 IEM_MC_PREPARE_AVX_USAGE();
3232 IEM_MC_FETCH_EFLAGS(fEFlags);
3233 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3234 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3235 pEFlags, uSrc1, uSrc2);
3236 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3237 IEM_MC_COMMIT_EFLAGS(fEFlags);
3238
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242}
3243
3244
3245/* Opcode VEX.F3.0F 0x2f - invalid */
3246/* Opcode VEX.F2.0F 0x2f - invalid */
3247
3248/* Opcode VEX.0F 0x30 - invalid */
3249/* Opcode VEX.0F 0x31 - invalid */
3250/* Opcode VEX.0F 0x32 - invalid */
3251/* Opcode VEX.0F 0x33 - invalid */
3252/* Opcode VEX.0F 0x34 - invalid */
3253/* Opcode VEX.0F 0x35 - invalid */
3254/* Opcode VEX.0F 0x36 - invalid */
3255/* Opcode VEX.0F 0x37 - invalid */
3256/* Opcode VEX.0F 0x38 - invalid */
3257/* Opcode VEX.0F 0x39 - invalid */
3258/* Opcode VEX.0F 0x3a - invalid */
3259/* Opcode VEX.0F 0x3b - invalid */
3260/* Opcode VEX.0F 0x3c - invalid */
3261/* Opcode VEX.0F 0x3d - invalid */
3262/* Opcode VEX.0F 0x3e - invalid */
3263/* Opcode VEX.0F 0x3f - invalid */
3264/* Opcode VEX.0F 0x40 - invalid */
3265/* Opcode VEX.0F 0x41 - invalid */
3266/* Opcode VEX.0F 0x42 - invalid */
3267/* Opcode VEX.0F 0x43 - invalid */
3268/* Opcode VEX.0F 0x44 - invalid */
3269/* Opcode VEX.0F 0x45 - invalid */
3270/* Opcode VEX.0F 0x46 - invalid */
3271/* Opcode VEX.0F 0x47 - invalid */
3272/* Opcode VEX.0F 0x48 - invalid */
3273/* Opcode VEX.0F 0x49 - invalid */
3274/* Opcode VEX.0F 0x4a - invalid */
3275/* Opcode VEX.0F 0x4b - invalid */
3276/* Opcode VEX.0F 0x4c - invalid */
3277/* Opcode VEX.0F 0x4d - invalid */
3278/* Opcode VEX.0F 0x4e - invalid */
3279/* Opcode VEX.0F 0x4f - invalid */
3280
3281
3282/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
3283FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
3284{
3285 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO);
3286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3287 if (IEM_IS_MODRM_REG_MODE(bRm))
3288 {
3289 /*
3290 * Register, register.
3291 */
3292 if (pVCpu->iem.s.uVexLength == 0)
3293 {
3294 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3295 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3296 IEM_MC_LOCAL(uint8_t, u8Dst);
3297 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3298 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3299 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3300 IEM_MC_PREPARE_AVX_USAGE();
3301 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3302 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
3303 pu8Dst, puSrc);
3304 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3305 IEM_MC_ADVANCE_RIP_AND_FINISH();
3306 IEM_MC_END();
3307 }
3308 else
3309 {
3310 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3311 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3312 IEM_MC_LOCAL(uint8_t, u8Dst);
3313 IEM_MC_LOCAL(RTUINT256U, uSrc);
3314 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3315 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3316
3317 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3318 IEM_MC_PREPARE_AVX_USAGE();
3319 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3320 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
3321 pu8Dst, puSrc);
3322 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3323 IEM_MC_ADVANCE_RIP_AND_FINISH();
3324 IEM_MC_END();
3325 }
3326 }
3327 /* No memory operand. */
3328 else
3329 IEMOP_RAISE_INVALID_OPCODE_RET();
3330}
3331
3332
3333/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
3334FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
3335{
3336 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3338 if (IEM_IS_MODRM_REG_MODE(bRm))
3339 {
3340 /*
3341 * Register, register.
3342 */
3343 if (pVCpu->iem.s.uVexLength == 0)
3344 {
3345 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3346 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3347 IEM_MC_LOCAL(uint8_t, u8Dst);
3348 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3349 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3350 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3351 IEM_MC_PREPARE_AVX_USAGE();
3352 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3353 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
3354 pu8Dst, puSrc);
3355 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3356 IEM_MC_ADVANCE_RIP_AND_FINISH();
3357 IEM_MC_END();
3358 }
3359 else
3360 {
3361 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3362 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3363 IEM_MC_LOCAL(uint8_t, u8Dst);
3364 IEM_MC_LOCAL(RTUINT256U, uSrc);
3365 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3366 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3367
3368 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3369 IEM_MC_PREPARE_AVX_USAGE();
3370 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3371 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
3372 pu8Dst, puSrc);
3373 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3374 IEM_MC_ADVANCE_RIP_AND_FINISH();
3375 IEM_MC_END();
3376 }
3377 }
3378 /* No memory operand. */
3379 else
3380 IEMOP_RAISE_INVALID_OPCODE_RET();
3381}
3382
3383
3384/* Opcode VEX.F3.0F 0x50 - invalid */
3385/* Opcode VEX.F2.0F 0x50 - invalid */
3386
3387/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
3388FNIEMOP_DEF(iemOp_vsqrtps_Vps_Wps)
3389{
3390 IEMOP_MNEMONIC2(VEX_RM, VSQRTPS, vsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3391 IEMOPMEDIAF2_INIT_VARS( vsqrtps);
3392 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3393}
3394
3395
3396/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
3397FNIEMOP_DEF(iemOp_vsqrtpd_Vpd_Wpd)
3398{
3399 IEMOP_MNEMONIC2(VEX_RM, VSQRTPD, vsqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3400 IEMOPMEDIAF2_INIT_VARS( vsqrtpd);
3401 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3402}
3403
3404
3405/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
3406FNIEMOP_DEF(iemOp_vsqrtss_Vss_Hss_Wss)
3407{
3408 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSS, vsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3409 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3410 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback));
3411}
3412
3413
3414/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
3415FNIEMOP_DEF(iemOp_vsqrtsd_Vsd_Hsd_Wsd)
3416{
3417 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSD, vsqrtsd, Vps, Hps, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3418 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3419 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback));
3420}
3421
3422
3423/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
3424FNIEMOP_DEF(iemOp_vrsqrtps_Vps_Wps)
3425{
3426 IEMOP_MNEMONIC2(VEX_RM, VRSQRTPS, vrsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3427 IEMOPMEDIAF2_INIT_VARS( vrsqrtps);
3428 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3429}
3430
3431
3432/* Opcode VEX.66.0F 0x52 - invalid */
3433
3434
3435/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
3436FNIEMOP_DEF(iemOp_vrsqrtss_Vss_Hss_Wss)
3437{
3438 IEMOP_MNEMONIC3(VEX_RVM, VRSQRTSS, vrsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3439 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3440 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback));
3441}
3442
3443
3444/* Opcode VEX.F2.0F 0x52 - invalid */
3445
3446
3447/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
3448FNIEMOP_DEF(iemOp_vrcpps_Vps_Wps)
3449{
3450 IEMOP_MNEMONIC2(VEX_RM, VRCPPS, vrcpps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3451 IEMOPMEDIAF2_INIT_VARS( vrcpps);
3452 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3453}
3454
3455
3456/* Opcode VEX.66.0F 0x53 - invalid */
3457
3458
3459/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
3460FNIEMOP_DEF(iemOp_vrcpss_Vss_Hss_Wss)
3461{
3462 IEMOP_MNEMONIC3(VEX_RVM, VRCPSS, vrcpss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3463 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3464 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback));
3465}
3466
3467
3468/* Opcode VEX.F2.0F 0x53 - invalid */
3469
3470
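/* Note: the vandps/vandpd style instructions below reuse the integer
 * vpand/vpandn/vpor/vpxor workers; a bitwise operation is type agnostic,
 * so the packed-float forms produce identical results. */
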
3471/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
3472FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
3473{
3474 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3475 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3476 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3477}
3478
3479
3480/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
3481FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
3482{
3483 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3484 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3485 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3486}
3487
3488
3489/* Opcode VEX.F3.0F 0x54 - invalid */
3490/* Opcode VEX.F2.0F 0x54 - invalid */
3491
3492
3493/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
3494FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
3495{
3496 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3497 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3498 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3499}
3500
3501
3502/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
3503FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
3504{
3505 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3506 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3507 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3508}
3509
3510
3511/* Opcode VEX.F3.0F 0x55 - invalid */
3512/* Opcode VEX.F2.0F 0x55 - invalid */
3513
3514/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
3515FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
3516{
3517 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3518 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3519 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3520}
3521
3522
3523/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
3524FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
3525{
3526 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3527 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3528 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3529}
3530
3531
3532/* Opcode VEX.F3.0F 0x56 - invalid */
3533/* Opcode VEX.F2.0F 0x56 - invalid */
3534
3535
3536/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
3537FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
3538{
3539 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3540 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3541 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3542}
3543
3544
3545/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
3546FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
3547{
3548 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3549 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3550 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3551}
3552
3553
3554/* Opcode VEX.F3.0F 0x57 - invalid */
3555/* Opcode VEX.F2.0F 0x57 - invalid */
3556
3557
3558/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
3559FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
3560{
3561 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3562 IEMOPMEDIAF3_INIT_VARS( vaddps);
3563 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3564}
3565
3566
3567/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
3568FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
3569{
3570 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3571 IEMOPMEDIAF3_INIT_VARS( vaddpd);
3572 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3573}
3574
3575
3576/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
3577FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
3578{
3579 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3580 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3581 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
3582}
3583
3584
3585/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
3586FNIEMOP_DEF(iemOp_vaddsd_Vsd_Hsd_Wsd)
3587{
3588 IEMOP_MNEMONIC3(VEX_RVM, VADDSD, vaddsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3589 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3590 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback));
3591}
3592
3593
3594/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
3595FNIEMOP_DEF(iemOp_vmulps_Vps_Hps_Wps)
3596{
3597 IEMOP_MNEMONIC3(VEX_RVM, VMULPS, vmulps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3598 IEMOPMEDIAF3_INIT_VARS( vmulps);
3599 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3600}
3601
3602
3603/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
3604FNIEMOP_DEF(iemOp_vmulpd_Vpd_Hpd_Wpd)
3605{
3606 IEMOP_MNEMONIC3(VEX_RVM, VMULPD, vmulpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3607 IEMOPMEDIAF3_INIT_VARS( vmulpd);
3608 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3609}
3610
3611
3612/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
3613FNIEMOP_DEF(iemOp_vmulss_Vss_Hss_Wss)
3614{
3615 IEMOP_MNEMONIC3(VEX_RVM, VMULSS, vmulss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3616 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3617 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback));
3618}
3619
3620
3621/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
3622FNIEMOP_DEF(iemOp_vmulsd_Vsd_Hsd_Wsd)
3623{
3624 IEMOP_MNEMONIC3(VEX_RVM, VMULSD, vmulsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3625 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3626 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback));
3627}
3628
3629
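/* For reference, each source single widens to a double, so the source is
 * half the width of the destination:
 *      vcvtps2pd xmm1, xmm2/m64        ; 2 x f32 -> 2 x f64
 *      vcvtps2pd ymm1, xmm2/m128       ; 4 x f32 -> 4 x f64
 */
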
3630/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
3631FNIEMOP_DEF(iemOp_vcvtps2pd_Vpd_Wps)
3632{
3633 IEMOP_MNEMONIC2(VEX_RM, VCVTPS2PD, vcvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3635 if (IEM_IS_MODRM_REG_MODE(bRm))
3636 {
3637 /*
3638 * Register, register.
3639 */
3640 if (pVCpu->iem.s.uVexLength)
3641 {
3642 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3643 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3644 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3645 IEM_MC_PREPARE_AVX_USAGE();
3646
3647 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
3648 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3649 IEM_MC_LOCAL( X86YMMREG, uDst);
3650 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
3651 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3652 iemAImpl_vcvtps2pd_u256_u128,
3653 iemAImpl_vcvtps2pd_u256_u128_fallback),
3654 puDst, puSrc);
3655 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3656 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3657 IEM_MC_ADVANCE_RIP_AND_FINISH();
3658 IEM_MC_END();
3659 }
3660 else
3661 {
3662 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3663 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3664 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3665 IEM_MC_PREPARE_AVX_USAGE();
3666
3667 IEM_MC_ARG( const uint64_t *, pu64Src, 1);
3668 IEM_MC_REF_XREG_U64_CONST( pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3669 IEM_MC_LOCAL( X86XMMREG, uDst);
3670 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3671 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3672 iemAImpl_vcvtps2pd_u128_u64,
3673 iemAImpl_vcvtps2pd_u128_u64_fallback),
3674 puDst, pu64Src);
3675 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3676 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3677 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3678 IEM_MC_ADVANCE_RIP_AND_FINISH();
3679 IEM_MC_END();
3680 }
3681 }
3682 else
3683 {
3684 /*
3685 * Register, memory.
3686 */
3687 if (pVCpu->iem.s.uVexLength)
3688 {
3689 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3692 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3694 IEM_MC_PREPARE_AVX_USAGE();
3695
3696 IEM_MC_LOCAL(X86XMMREG, uSrc);
3697 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
3698 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3699 IEM_MC_LOCAL(X86YMMREG, uDst);
3700 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
3701 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3702 iemAImpl_vcvtps2pd_u256_u128,
3703 iemAImpl_vcvtps2pd_u256_u128_fallback),
3704 puDst, puSrc);
3705 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3706 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3707 IEM_MC_ADVANCE_RIP_AND_FINISH();
3708 IEM_MC_END();
3709 }
3710 else
3711 {
3712 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3715 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3716 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3717 IEM_MC_PREPARE_AVX_USAGE();
3718
3719 IEM_MC_LOCAL( uint64_t, u64Src);
3720 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
3721 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3722 IEM_MC_LOCAL( X86XMMREG, uDst);
3723 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3724 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3725 iemAImpl_vcvtps2pd_u128_u64,
3726 iemAImpl_vcvtps2pd_u128_u64_fallback),
3727 puDst, pu64Src);
3728 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3729 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3730 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3731 IEM_MC_ADVANCE_RIP_AND_FINISH();
3732 IEM_MC_END();
3733 }
3734 }
3735}
3736
3737
3738/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
3739FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
3740/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
3741FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
3742/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
3743FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
3744
3745/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
3746FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
3747/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
3748FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
3749/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
3750FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
3751/* Opcode VEX.F2.0F 0x5b - invalid */
3752
3753
3754/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
3755FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
3756{
3757 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3758 IEMOPMEDIAF3_INIT_VARS( vsubps);
3759 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3760}
3761
3762
3763/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
3764FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
3765{
3766 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3767 IEMOPMEDIAF3_INIT_VARS( vsubpd);
3768 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3769}
3770
3771
3772/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
3773FNIEMOP_DEF(iemOp_vsubss_Vss_Hss_Wss)
3774{
3775 IEMOP_MNEMONIC3(VEX_RVM, VSUBSS, vsubss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3776 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3777 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback));
3778}
3779
3780
3781/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
3782FNIEMOP_DEF(iemOp_vsubsd_Vsd_Hsd_Wsd)
3783{
3784 IEMOP_MNEMONIC3(VEX_RVM, VSUBSD, vsubsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3785 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3786 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback));
3787}
3788
3789
3790/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
3791FNIEMOP_DEF(iemOp_vminps_Vps_Hps_Wps)
3792{
3793 IEMOP_MNEMONIC3(VEX_RVM, VMINPS, vminps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3794 IEMOPMEDIAF3_INIT_VARS( vminps);
3795 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3796}
3797
3798
3799/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
3800FNIEMOP_DEF(iemOp_vminpd_Vpd_Hpd_Wpd)
3801{
3802 IEMOP_MNEMONIC3(VEX_RVM, VMINPD, vminpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3803 IEMOPMEDIAF3_INIT_VARS( vminpd);
3804 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3805}
3806
3807
3808/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
3809FNIEMOP_DEF(iemOp_vminss_Vss_Hss_Wss)
3810{
3811 IEMOP_MNEMONIC3(VEX_RVM, VMINSS, vminss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3812 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3813 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback));
3814}
3815
3816
3817/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
3818FNIEMOP_DEF(iemOp_vminsd_Vsd_Hsd_Wsd)
3819{
3820 IEMOP_MNEMONIC3(VEX_RVM, VMINSD, vminsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3821 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3822 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback));
3823}
3824
3825
3826/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
3827FNIEMOP_DEF(iemOp_vdivps_Vps_Hps_Wps)
3828{
3829 IEMOP_MNEMONIC3(VEX_RVM, VDIVPS, vdivps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3830 IEMOPMEDIAF3_INIT_VARS( vdivps);
3831 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3832}
3833
3834
3835/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
3836FNIEMOP_DEF(iemOp_vdivpd_Vpd_Hpd_Wpd)
3837{
3838 IEMOP_MNEMONIC3(VEX_RVM, VDIVPD, vdivpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3839 IEMOPMEDIAF3_INIT_VARS( vdivpd);
3840 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3841}
3842
3843
3844/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
3845FNIEMOP_DEF(iemOp_vdivss_Vss_Hss_Wss)
3846{
3847 IEMOP_MNEMONIC3(VEX_RVM, VDIVSS, vdivss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3848 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3849 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback));
3850}
3851
3852
3853/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
3854FNIEMOP_DEF(iemOp_vdivsd_Vsd_Hsd_Wsd)
3855{
3856 IEMOP_MNEMONIC3(VEX_RVM, VDIVSD, vdivsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3857 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3858 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback));
3859}
3860
3861
3862/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
3863FNIEMOP_DEF(iemOp_vmaxps_Vps_Hps_Wps)
3864{
3865 IEMOP_MNEMONIC3(VEX_RVM, VMAXPS, vmaxps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3866 IEMOPMEDIAF3_INIT_VARS( vmaxps);
3867 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3868}
3869
3870
3871/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
3872FNIEMOP_DEF(iemOp_vmaxpd_Vpd_Hpd_Wpd)
3873{
3874 IEMOP_MNEMONIC3(VEX_RVM, VMAXPD, vmaxpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3875 IEMOPMEDIAF3_INIT_VARS( vmaxpd);
3876 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3877}
3878
3879
3880/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
3881FNIEMOP_DEF(iemOp_vmaxss_Vss_Hss_Wss)
3882{
3883 IEMOP_MNEMONIC3(VEX_RVM, VMAXSS, vmaxss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3884 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3885 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback));
3886}
3887
3888
3889/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
3890FNIEMOP_DEF(iemOp_vmaxsd_Vsd_Hsd_Wsd)
3891{
3892 IEMOP_MNEMONIC3(VEX_RVM, VMAXSD, vmaxsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3893 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3894 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback));
3895}
3896
3897
3898/* Opcode VEX.0F 0x60 - invalid */
3899
3900
3901/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
3902FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
3903{
3904 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3905 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
3906 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3907}
3908
3909
3910/* Opcode VEX.F3.0F 0x60 - invalid */
3911
3912
3913/* Opcode VEX.0F 0x61 - invalid */
3914
3915
3916/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
3917FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
3918{
3919 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3920 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
3921 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3922}
3923
3924
3925/* Opcode VEX.F3.0F 0x61 - invalid */
3926
3927
3928/* Opcode VEX.0F 0x62 - invalid */
3929
3930/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
3931FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
3932{
3933 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3934 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
3935 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3936}
3937
3938
3939/* Opcode VEX.F3.0F 0x62 - invalid */
3940
3941
3942
3943/* Opcode VEX.0F 0x63 - invalid */
3944
3945
3946/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
3947FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
3948{
3949 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3950 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
3951 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3952}
3953
3954
3955/* Opcode VEX.F3.0F 0x63 - invalid */
3956
3957/* Opcode VEX.0F 0x64 - invalid */
3958
3959
3960/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
3961FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
3962{
3963 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3964 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
3965 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3966}
3967
3968
3969/* Opcode VEX.F3.0F 0x64 - invalid */
3970
3971/* Opcode VEX.0F 0x65 - invalid */
3972
3973
3974/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3975FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3976{
3977 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3978 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
3979 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3980}
3981
3982
3983/* Opcode VEX.F3.0F 0x65 - invalid */
3984
3985/* Opcode VEX.0F 0x66 - invalid */
3986
3987
3988/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3989FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3990{
3991 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3992 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3993 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3994}
3995
3996
3997/* Opcode VEX.F3.0F 0x66 - invalid */
3998
3999/* Opcode VEX.0F 0x67 - invalid */
4000
4001
4002/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
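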
4003FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
4004{
4005 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4006 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
4007 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4008}
4009
4010
4011/* Opcode VEX.F3.0F 0x67 - invalid */
4012
4013
4014///**
4015// * Common worker for SSE2 instructions on the form:
4016// * pxxxx xmm1, xmm2/mem128
4017// *
4018// * The 2nd operand is the second half of a register, which in the memory case
4019// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
4020// * where it may read the full 128 bits or only the upper 64 bits.
4021// *
4022// * Exceptions type 4.
4023// */
4024//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
4025//{
4026// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4027// if (IEM_IS_MODRM_REG_MODE(bRm))
4028// {
4029// /*
4030// * Register, register.
4031// */
4032// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4033// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4034// IEM_MC_ARG(PRTUINT128U, pDst, 0);
4035// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4036// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4037// IEM_MC_PREPARE_SSE_USAGE();
4038// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4039// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4040// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4041// IEM_MC_ADVANCE_RIP_AND_FINISH();
4042// IEM_MC_END();
4043// }
4044// else
4045// {
4046// /*
4047// * Register, memory.
4048// */
4049// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4050// IEM_MC_ARG(PRTUINT128U, pDst, 0);
4051// IEM_MC_LOCAL(RTUINT128U, uSrc);
4052// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4053// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4054//
4055// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4056// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4057// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4058//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
4059//
4060// IEM_MC_PREPARE_SSE_USAGE();
4061// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4062// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4063//
4064// IEM_MC_ADVANCE_RIP_AND_FINISH();
4065// IEM_MC_END();
4066// }
4067// return VINF_SUCCESS;
4068//}
4069
4070
4071/* Opcode VEX.0F 0x68 - invalid */
4072
4073/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
4074FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
4075{
4076 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4077 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
4078 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4079}
4080
4081
4082/* Opcode VEX.F3.0F 0x68 - invalid */
4083
4084
4085/* Opcode VEX.0F 0x69 - invalid */
4086
4087
4088/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
4089FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
4090{
4091 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4092 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
4093 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4094}
4095
4096
4097/* Opcode VEX.F3.0F 0x69 - invalid */
4098
4099
4100/* Opcode VEX.0F 0x6a - invalid */
4101
4102
4103/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
4104FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
4105{
4106 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4107 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
4108 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4109}
4110
4111
4112/* Opcode VEX.F3.0F 0x6a - invalid */
4113
4114
4115/* Opcode VEX.0F 0x6b - invalid */
4116
4117
4118/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
4119FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
4120{
4121 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4122 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
4123 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4124}
4125
4126
4127/* Opcode VEX.F3.0F 0x6b - invalid */
4128
4129
4130/* Opcode VEX.0F 0x6c - invalid */
4131
4132
4133/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
4134FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
4135{
4136 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4137 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
4138 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4139}
4140
4141
4142/* Opcode VEX.F3.0F 0x6c - invalid */
4143/* Opcode VEX.F2.0F 0x6c - invalid */
4144
4145
4146/* Opcode VEX.0F 0x6d - invalid */
4147
4148
4149/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
4150FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
4151{
4152 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4153 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
4154 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4155}
4156
4157
4158/* Opcode VEX.F3.0F 0x6d - invalid */
4159
4160
4161/* Opcode VEX.0F 0x6e - invalid */
4162
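/** Opcode VEX.66.0F 0x6e - vmovd Vd, Ed (VEX.W=0) / vmovq Vq, Eq (VEX.W=1);
 *  see the opcode notes inside the function body. */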
4163FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
4164{
4165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4166 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4167 {
4168 /**
4169 * @opcode 0x6e
4170 * @opcodesub rex.w=1
4171 * @oppfx 0x66
4172 * @opcpuid avx
4173 * @opgroup og_avx_simdint_datamov
4174 * @opxcpttype 5
4175 * @optest 64-bit / op1=1 op2=2 -> op1=2
4176 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4177 */
4178 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4179 if (IEM_IS_MODRM_REG_MODE(bRm))
4180 {
4181 /* XMM, greg64 */
4182 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4183 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4184 IEM_MC_LOCAL(uint64_t, u64Tmp);
4185
4186 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4187 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4188
4189 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4190 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4191
4192 IEM_MC_ADVANCE_RIP_AND_FINISH();
4193 IEM_MC_END();
4194 }
4195 else
4196 {
4197 /* XMM, [mem64] */
4198 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4199 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4200 IEM_MC_LOCAL(uint64_t, u64Tmp);
4201
4202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4203 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4204 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4205 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4206
4207 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4208 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4209
4210 IEM_MC_ADVANCE_RIP_AND_FINISH();
4211 IEM_MC_END();
4212 }
4213 }
4214 else
4215 {
4216 /**
4217 * @opdone
4218 * @opcode 0x6e
4219 * @opcodesub rex.w=0
4220 * @oppfx 0x66
4221 * @opcpuid avx
4222 * @opgroup og_avx_simdint_datamov
4223 * @opxcpttype 5
4224 * @opfunction iemOp_vmovd_q_Vy_Ey
4225 * @optest op1=1 op2=2 -> op1=2
4226 * @optest op1=0 op2=-42 -> op1=-42
4227 */
4228 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4229 if (IEM_IS_MODRM_REG_MODE(bRm))
4230 {
4231 /* XMM, greg32 */
4232 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4233 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4234 IEM_MC_LOCAL(uint32_t, u32Tmp);
4235
4236 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4237 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4238
4239 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4240 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4241
4242 IEM_MC_ADVANCE_RIP_AND_FINISH();
4243 IEM_MC_END();
4244 }
4245 else
4246 {
4247 /* XMM, [mem32] */
4248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4250 IEM_MC_LOCAL(uint32_t, u32Tmp);
4251
4252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4253 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4256
4257 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4258 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4259
4260 IEM_MC_ADVANCE_RIP_AND_FINISH();
4261 IEM_MC_END();
4262 }
4263 }
4264}
4265
4266
4267/* Opcode VEX.F3.0F 0x6e - invalid */
4268
4269
4270/* Opcode VEX.0F 0x6f - invalid */
4271
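/* Note: the aligned variant below uses the ALIGN_SSE/ALIGN_AVX fetches, so a
 * misaligned memory operand faults; the F3-prefixed vmovdqu form further down
 * uses the no-alignment-check (NO_AC) fetches instead. */
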
4272/**
4273 * @opcode 0x6f
4274 * @oppfx 0x66
4275 * @opcpuid avx
4276 * @opgroup og_avx_simdint_datamove
4277 * @opxcpttype 1
4278 * @optest op1=1 op2=2 -> op1=2
4279 * @optest op1=0 op2=-42 -> op1=-42
4280 */
4281FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
4282{
4283 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4284 Assert(pVCpu->iem.s.uVexLength <= 1);
4285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4286 if (IEM_IS_MODRM_REG_MODE(bRm))
4287 {
4288 /*
4289 * Register, register.
4290 */
4291 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4292 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4293
4294 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4295 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4296 if (pVCpu->iem.s.uVexLength == 0)
4297 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4298 IEM_GET_MODRM_RM(pVCpu, bRm));
4299 else
4300 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4301 IEM_GET_MODRM_RM(pVCpu, bRm));
4302 IEM_MC_ADVANCE_RIP_AND_FINISH();
4303 IEM_MC_END();
4304 }
4305 else if (pVCpu->iem.s.uVexLength == 0)
4306 {
4307 /*
4308 * Register, memory128.
4309 */
4310 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4311 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4312 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4313
4314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4315 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4316 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4317 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4318
4319 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4320 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4321
4322 IEM_MC_ADVANCE_RIP_AND_FINISH();
4323 IEM_MC_END();
4324 }
4325 else
4326 {
4327 /*
4328 * Register, memory256.
4329 */
4330 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4331 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4333
4334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4335 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4336 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4337 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4338
4339 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4340 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4341
4342 IEM_MC_ADVANCE_RIP_AND_FINISH();
4343 IEM_MC_END();
4344 }
4345}
4346
4347/**
4348 * @opcode 0x6f
4349 * @oppfx 0xf3
4350 * @opcpuid avx
4351 * @opgroup og_avx_simdint_datamove
4352 * @opxcpttype 4UA
4353 * @optest op1=1 op2=2 -> op1=2
4354 * @optest op1=0 op2=-42 -> op1=-42
4355 */
4356FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
4357{
4358 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4359 Assert(pVCpu->iem.s.uVexLength <= 1);
4360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4361 if (IEM_IS_MODRM_REG_MODE(bRm))
4362 {
4363 /*
4364 * Register, register.
4365 */
4366 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4367 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4368
4369 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4370 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4371 if (pVCpu->iem.s.uVexLength == 0)
4372 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4373 IEM_GET_MODRM_RM(pVCpu, bRm));
4374 else
4375 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4376 IEM_GET_MODRM_RM(pVCpu, bRm));
4377 IEM_MC_ADVANCE_RIP_AND_FINISH();
4378 IEM_MC_END();
4379 }
4380 else if (pVCpu->iem.s.uVexLength == 0)
4381 {
4382 /*
4383 * Register, memory128.
4384 */
4385 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4386 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4388
4389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4390 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4391 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4392 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4393
4394 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4395 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4396
4397 IEM_MC_ADVANCE_RIP_AND_FINISH();
4398 IEM_MC_END();
4399 }
4400 else
4401 {
4402 /*
4403 * Register, memory256.
4404 */
4405 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4406 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4408
4409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4410 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4411 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4412 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4413
4414 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4415 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4416
4417 IEM_MC_ADVANCE_RIP_AND_FINISH();
4418 IEM_MC_END();
4419 }
4420}
4421
4422
4423/* Opcode VEX.0F 0x70 - invalid */
4424
4425
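/* An example of the imm8-controlled shuffles handled by the worker below:
 *      vpshufd xmm1, xmm2, 0x1B        ; reverse the four dwords
 *      vpshufd ymm1, ymm2, 0x1B        ; ditto, within each 128-bit lane
 */
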
4426/**
4427 * Common worker for AVX/AVX2 instructions on the forms:
4428 * - vpxxx xmm0, xmm2/mem128, imm8
4429 * - vpxxx ymm0, ymm2/mem256, imm8
4430 *
4431 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4432 */
4433FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4434{
4435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4436 if (IEM_IS_MODRM_REG_MODE(bRm))
4437 {
4438 /*
4439 * Register, register.
4440 */
4441 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4442 if (pVCpu->iem.s.uVexLength)
4443 {
4444 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4445 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4446 IEM_MC_LOCAL(RTUINT256U, uDst);
4447 IEM_MC_LOCAL(RTUINT256U, uSrc);
4448 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4449 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4450 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4451 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4452 IEM_MC_PREPARE_AVX_USAGE();
4453 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4454 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4455 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4456 IEM_MC_ADVANCE_RIP_AND_FINISH();
4457 IEM_MC_END();
4458 }
4459 else
4460 {
4461 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4462 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4463 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4464 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4465 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4466 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4467 IEM_MC_PREPARE_AVX_USAGE();
4468 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4469 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4470 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4471 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4472 IEM_MC_ADVANCE_RIP_AND_FINISH();
4473 IEM_MC_END();
4474 }
4475 }
4476 else
4477 {
4478 /*
4479 * Register, memory.
4480 */
4481 if (pVCpu->iem.s.uVexLength)
4482 {
4483 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4484 IEM_MC_LOCAL(RTUINT256U, uDst);
4485 IEM_MC_LOCAL(RTUINT256U, uSrc);
4486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4487 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4488 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4489
4490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4491 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4492 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4493 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4494 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4495 IEM_MC_PREPARE_AVX_USAGE();
4496
4497 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4498 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4499 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4500
4501 IEM_MC_ADVANCE_RIP_AND_FINISH();
4502 IEM_MC_END();
4503 }
4504 else
4505 {
4506 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4507 IEM_MC_LOCAL(RTUINT128U, uSrc);
4508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4509 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4510 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4511
4512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4513 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4514 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4515 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4516 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4517 IEM_MC_PREPARE_AVX_USAGE();
4518
4519 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4520 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4521 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4522 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4523
4524 IEM_MC_ADVANCE_RIP_AND_FINISH();
4525 IEM_MC_END();
4526 }
4527 }
4528}
4529
4530
4531/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
4532FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
4533{
4534 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4535 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
4536 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
4538}
4539
4540
4541/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
4542FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
4543{
4544 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4545 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
4546 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
4548}
4549
4550
4551/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
4552FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
4553{
4554 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4555 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
4556 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
4557}
4558
4559
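/* An example of the register-only immediate shift forms handled below, where
 * the destination is encoded in VEX.vvvv and the count in the immediate:
 *      vpsrlw xmm1, xmm2, 4            ; each word of xmm2 shifted right by 4
 */
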
4560/**
4561 * Common worker(s) for AVX/AVX2 instructions on the forms:
4562 * - vpxxx xmm0, xmm2, imm8
4563 * - vpxxx ymm0, ymm2, imm8
4564 *
4565 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4566 */
4567FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
4568{
4569 if (IEM_IS_MODRM_REG_MODE(bRm))
4570 {
4571 /*
4572 * Register, register.
4573 */
4574 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4575 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4576 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
4577 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4578 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4579 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4580 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4581 IEM_MC_PREPARE_AVX_USAGE();
4582 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4583 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4584 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4585 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
4586 IEM_MC_ADVANCE_RIP_AND_FINISH();
4587 IEM_MC_END();
4588 }
4589 /* No memory operand. */
4590 else
4591 IEMOP_RAISE_INVALID_OPCODE_RET();
4592}
4593
4594FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4595{
4596 if (IEM_IS_MODRM_REG_MODE(bRm))
4597 {
4598 /*
4599 * Register, register.
4600 */
4601 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4602 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4603 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
4604 IEM_MC_LOCAL(RTUINT256U, uDst);
4605 IEM_MC_LOCAL(RTUINT256U, uSrc);
4606 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4607 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4608 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4609 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4610 IEM_MC_PREPARE_AVX_USAGE();
4611 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4612 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4613 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
4614 IEM_MC_ADVANCE_RIP_AND_FINISH();
4615 IEM_MC_END();
4616 }
4617 /* No memory operand. */
4618 else
4619 IEMOP_RAISE_INVALID_OPCODE_RET();
4620}
4621
4622
4623/* Opcode VEX.0F 0x71 11/2 - invalid. */
4624/** Opcode VEX.66.0F 0x71 11/2. */
4625FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
4626{
4627 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4628 if (pVCpu->iem.s.uVexLength)
4629 {
4630 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4631 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
4632 }
4633 else
4634 {
4635 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4636 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
4637 }
4638}
4639
4640
4641/* Opcode VEX.0F 0x71 11/4 - invalid */
4642/** Opcode VEX.66.0F 0x71 11/4. */
4643FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
4644{
4645 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4646 if (pVCpu->iem.s.uVexLength)
4647 {
4648 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4649 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
4650 }
4651 else
4652 {
4653 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4654 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
4655 }
4656}
4657
4658/* Opcode VEX.0F 0x71 11/6 - invalid */
4659
4660/** Opcode VEX.66.0F 0x71 11/6. */
4661FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
4662{
4663 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4664 if (pVCpu->iem.s.uVexLength)
4665 {
4666 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4667 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
4668 }
4669 else
4670 {
4671 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4672 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
4673 }
4674}
4675
4676
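/* The VEX group jump tables below are indexed by modrm.reg * 4 + the SIMD
 * prefix index (none, 0x66, 0xF3, 0xF2), matching the dispatchers further
 * down: g_apfnVexGroupXXRegReg[IEM_GET_MODRM_REG_8(bRm) * 4 + idxPrefix]. */
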
4677/**
4678 * VEX Group 12 jump table for register variant.
4679 */
4680IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
4681{
4682 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4683 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4684 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4685 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4686 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4687 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4688 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4689 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4690};
4691AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
4692
4693
4694/** Opcode VEX.0F 0x71. */
4695FNIEMOP_DEF(iemOp_VGrp12)
4696{
4697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4698 if (IEM_IS_MODRM_REG_MODE(bRm))
4699 /* register, register */
4700 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4701 + pVCpu->iem.s.idxPrefix], bRm);
4702 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4703}
4704
4705
4706/* Opcode VEX.0F 0x72 11/2 - invalid. */
4707/** Opcode VEX.66.0F 0x72 11/2. */
4708FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
4709{
4710 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4711 if (pVCpu->iem.s.uVexLength)
4712 {
4713 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4714 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
4715 }
4716 else
4717 {
4718 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4719 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
4720 }
4721}
4722
4723
4724/* Opcode VEX.0F 0x72 11/4 - invalid. */
4725/** Opcode VEX.66.0F 0x72 11/4. */
4726FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
4727{
4728 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4729 if (pVCpu->iem.s.uVexLength)
4730 {
4731 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4732 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
4733 }
4734 else
4735 {
4736 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4737 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
4738 }
4739}
4740
4741/* Opcode VEX.0F 0x72 11/6 - invalid. */
4742
4743/** Opcode VEX.66.0F 0x72 11/6. */
4744FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
4745{
4746 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4747 if (pVCpu->iem.s.uVexLength)
4748 {
4749 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4750 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
4751 }
4752 else
4753 {
4754 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4755 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
4756 }
4757}
4758
4759
4760/**
4761 * VEX Group 13 jump table for register variant.
4762 */
4763IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
4764{
4765 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4766 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4767 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4768 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4769 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4770 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4771 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4772 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4773};
4774AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
4775
4776/** Opcode VEX.0F 0x72. */
4777FNIEMOP_DEF(iemOp_VGrp13)
4778{
4779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4780 if (IEM_IS_MODRM_REG_MODE(bRm))
4781 /* register, register */
4782 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4783 + pVCpu->iem.s.idxPrefix], bRm);
4784 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4785}
4786
4787
4788/* Opcode VEX.0F 0x73 11/2 - invalid. */
4789/** Opcode VEX.66.0F 0x73 11/2. */
4790FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
4791{
4792 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4793 if (pVCpu->iem.s.uVexLength)
4794 {
4795 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4796 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
4797 }
4798 else
4799 {
4800 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4801 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
4802 }
4803}
4804
4805
4806/** Opcode VEX.66.0F 0x73 11/3. */
4807FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
4808{
4809 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4810 if (pVCpu->iem.s.uVexLength)
4811 {
4812 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4813 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
4814 }
4815 else
4816 {
4817 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4818 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
4819 }
4820}
4821
4822/* Opcode VEX.0F 0x73 11/6 - invalid. */
4823
4824/** Opcode VEX.66.0F 0x73 11/6. */
4825FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
4826{
4827 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4828 if (pVCpu->iem.s.uVexLength)
4829 {
4830 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4831 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
4832 }
4833 else
4834 {
4835 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4836 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
4837 }
4838}
4839
4840/** Opcode VEX.66.0F 0x73 11/7. */
4841FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
4842{
4843 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4844 if (pVCpu->iem.s.uVexLength)
4845 {
4846 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4847 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
4848 }
4849 else
4850 {
4851 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4852 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
4853 }
4854}
4855
4856/* Opcode VEX.0F 0x73 11/7 - invalid. */
4857
4858/**
4859 * VEX Group 14 jump table for register variant.
4860 */
4861IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
4862{
4863 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4864 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4865 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4866 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4867 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4868 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4869 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4870 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4871};
4872AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
4873
4874
4875/** Opcode VEX.0F 0x73. */
4876FNIEMOP_DEF(iemOp_VGrp14)
4877{
4878 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4879 if (IEM_IS_MODRM_REG_MODE(bRm))
4880 /* register, register */
4881 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4882 + pVCpu->iem.s.idxPrefix], bRm);
4883 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4884}
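
/*
 * A quick sketch of the lookup above, with hypothetical local names: the
 * table rows are the ModRM.reg values /0../7 and the columns the mandatory
 * prefix (0=none, 1=66h, 2=F3h, 3=F2h), so the index is reg*4 + prefix:
 *
 *     unsigned const idxReg = (bRm >> 3) & 7;
 *     unsigned const idxPfx = pVCpu->iem.s.idxPrefix;
 *     return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[idxReg * 4 + idxPfx], bRm);
 */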
4885
4886
4887/* Opcode VEX.0F 0x74 - invalid */
4888
4889
4890/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
4891FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
4892{
4893 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4894 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
4895 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4896}
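
/*
 * For reference, a sketch of the element operation the vpcmpeqb workers
 * implement (per byte, all-ones on match, all-zero otherwise; cbVec being
 * 16 or 32 depending on VEX.L):
 *
 *     for (unsigned i = 0; i < cbVec; i++)
 *         puDst->au8[i] = puSrc1->au8[i] == puSrc2->au8[i] ? 0xff : 0x00;
 */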
4897
4898/* Opcode VEX.F3.0F 0x74 - invalid */
4899/* Opcode VEX.F2.0F 0x74 - invalid */
4900
4901
4902/* Opcode VEX.0F 0x75 - invalid */
4903
4904
4905/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
4906FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
4907{
4908 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4909 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
4910 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4911}
4912
4913
4914/* Opcode VEX.F3.0F 0x75 - invalid */
4915/* Opcode VEX.F2.0F 0x75 - invalid */
4916
4917
4918/* Opcode VEX.0F 0x76 - invalid */
4919
4920
4921/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
4922FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
4923{
4924 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4925 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
4926 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4927}
4928
4929
4930/* Opcode VEX.F3.0F 0x76 - invalid */
4931/* Opcode VEX.F2.0F 0x76 - invalid */
4932
4933
4934/** Opcode VEX.0F 0x77 - vzeroupper / vzeroall */
4935FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
4936{
4937 Assert(pVCpu->iem.s.uVexLength <= 1);
4938 if (pVCpu->iem.s.uVexLength == 0)
4939 {
4940 /*
4941 * 128-bit: vzeroupper
4942 */
4943 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
4944 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4945
4946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4948 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4949
4950 IEM_MC_CLEAR_YREG_128_UP(0);
4951 IEM_MC_CLEAR_YREG_128_UP(1);
4952 IEM_MC_CLEAR_YREG_128_UP(2);
4953 IEM_MC_CLEAR_YREG_128_UP(3);
4954 IEM_MC_CLEAR_YREG_128_UP(4);
4955 IEM_MC_CLEAR_YREG_128_UP(5);
4956 IEM_MC_CLEAR_YREG_128_UP(6);
4957 IEM_MC_CLEAR_YREG_128_UP(7);
4958
4959 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4960 {
4961 IEM_MC_CLEAR_YREG_128_UP( 8);
4962 IEM_MC_CLEAR_YREG_128_UP( 9);
4963 IEM_MC_CLEAR_YREG_128_UP(10);
4964 IEM_MC_CLEAR_YREG_128_UP(11);
4965 IEM_MC_CLEAR_YREG_128_UP(12);
4966 IEM_MC_CLEAR_YREG_128_UP(13);
4967 IEM_MC_CLEAR_YREG_128_UP(14);
4968 IEM_MC_CLEAR_YREG_128_UP(15);
4969 }
4970
4971 IEM_MC_ADVANCE_RIP_AND_FINISH();
4972 IEM_MC_END();
4973 }
4974 else
4975 {
4976 /*
4977 * 256-bit: vzeroall
4978 */
4979 IEMOP_MNEMONIC(vzeroall, "vzeroall");
4980 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4981
4982 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4983 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4984 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4985
4986 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
4987 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
4988 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
4989 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
4990 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
4991 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4992 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4993 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4994 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4995
4996 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4997 {
4998 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4999 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
5000 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
5001 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
5002 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
5003 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
5004 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
5005 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
5006 }
5007
5008 IEM_MC_ADVANCE_RIP_AND_FINISH();
5009 IEM_MC_END();
5010 }
5011}
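
/*
 * Rough architectural summary of the two forms, as a sketch (assuming a
 * simple au64[0..3] register layout): vzeroupper clears only bits 255:128,
 * vzeroall the whole register; both touch YMM8..YMM15 only in 64-bit mode.
 *
 *     for (unsigned i = 0; i < (f64BitMode ? 16 : 8); i++)
 *     {
 *         ymm[i].au64[2] = ymm[i].au64[3] = 0;            // both forms
 *         if (fVexL)                                      // VEX.L=1: vzeroall
 *             ymm[i].au64[0] = ymm[i].au64[1] = 0;        // also the XMM part
 *     }
 */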
5012
5013
5014/* Opcode VEX.66.0F 0x77 - invalid */
5015/* Opcode VEX.F3.0F 0x77 - invalid */
5016/* Opcode VEX.F2.0F 0x77 - invalid */
5017
5018/* Opcode VEX.0F 0x78 - invalid */
5019/* Opcode VEX.66.0F 0x78 - invalid */
5020/* Opcode VEX.F3.0F 0x78 - invalid */
5021/* Opcode VEX.F2.0F 0x78 - invalid */
5022
5023/* Opcode VEX.0F 0x79 - invalid */
5024/* Opcode VEX.66.0F 0x79 - invalid */
5025/* Opcode VEX.F3.0F 0x79 - invalid */
5026/* Opcode VEX.F2.0F 0x79 - invalid */
5027
5028/* Opcode VEX.0F 0x7a - invalid */
5029/* Opcode VEX.66.0F 0x7a - invalid */
5030/* Opcode VEX.F3.0F 0x7a - invalid */
5031/* Opcode VEX.F2.0F 0x7a - invalid */
5032
5033/* Opcode VEX.0F 0x7b - invalid */
5034/* Opcode VEX.66.0F 0x7b - invalid */
5035/* Opcode VEX.F3.0F 0x7b - invalid */
5036/* Opcode VEX.F2.0F 0x7b - invalid */
5037
5038/* Opcode VEX.0F 0x7c - invalid */
5039
5040
5041/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
5042FNIEMOP_DEF(iemOp_vhaddpd_Vpd_Hpd_Wpd)
5043{
5044 IEMOP_MNEMONIC3(VEX_RVM, VHADDPD, vhaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5045 IEMOPMEDIAF3_INIT_VARS( vhaddpd);
5046 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5047}
5048
5049
5050/* Opcode VEX.F3.0F 0x7c - invalid */
5051
5052
5053/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
5054FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
5055{
5056 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5057 IEMOPMEDIAF3_INIT_VARS( vhaddps);
5058 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5059}
5060
5061
5062/* Opcode VEX.0F 0x7d - invalid */
5063
5064
5065/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
5066FNIEMOP_DEF(iemOp_vhsubpd_Vpd_Hpd_Wpd)
5067{
5068 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPD, vhsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5069 IEMOPMEDIAF3_INIT_VARS( vhsubpd);
5070 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5071}
5072
5073
5074/* Opcode VEX.F3.0F 0x7d - invalid */
5075
5076
5077/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
5078FNIEMOP_DEF(iemOp_vhsubps_Vps_Hps_Wps)
5079{
5080 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPS, vhsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5081 IEMOPMEDIAF3_INIT_VARS( vhsubps);
5082 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5083}
5084
5085
5086/* Opcode VEX.0F 0x7e - invalid */
5087
5088FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
5089{
5090 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5091 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5092 {
5093 /**
5094 * @opcode 0x7e
5095 * @opcodesub rex.w=1
5096 * @oppfx 0x66
5097 * @opcpuid avx
5098 * @opgroup og_avx_simdint_datamove
5099 * @opxcpttype 5
5100 * @optest 64-bit / op1=1 op2=2 -> op1=2
5101 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5102 */
5103 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
5104 if (IEM_IS_MODRM_REG_MODE(bRm))
5105 {
5106 /* greg64, XMM */
5107 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5108 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5109 IEM_MC_LOCAL(uint64_t, u64Tmp);
5110
5111 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5112 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5113
5114 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5115 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5116
5117 IEM_MC_ADVANCE_RIP_AND_FINISH();
5118 IEM_MC_END();
5119 }
5120 else
5121 {
5122 /* [mem64], XMM */
5123 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5125 IEM_MC_LOCAL(uint64_t, u64Tmp);
5126
5127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5128 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5129 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5130 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5131
5132 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5133 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5134
5135 IEM_MC_ADVANCE_RIP_AND_FINISH();
5136 IEM_MC_END();
5137 }
5138 }
5139 else
5140 {
5141 /**
5142 * @opdone
5143 * @opcode 0x7e
5144 * @opcodesub rex.w=0
5145 * @oppfx 0x66
5146 * @opcpuid avx
5147 * @opgroup og_avx_simdint_datamov
5148 * @opxcpttype 5
5149 * @opfunction iemOp_vmovd_q_Ey_Vy
5150 * @optest op1=1 op2=2 -> op1=2
5151 * @optest op1=0 op2=-42 -> op1=-42
5152 */
5153 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
5154 if (IEM_IS_MODRM_REG_MODE(bRm))
5155 {
5156 /* greg32, XMM */
5157 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5158 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5159 IEM_MC_LOCAL(uint32_t, u32Tmp);
5160
5161 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5162 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5163
5164 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5165 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5166
5167 IEM_MC_ADVANCE_RIP_AND_FINISH();
5168 IEM_MC_END();
5169 }
5170 else
5171 {
5172 /* [mem32], XMM */
5173 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5175 IEM_MC_LOCAL(uint32_t, u32Tmp);
5176
5177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5178 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5179 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5180 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5181
5182 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5183 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5184
5185 IEM_MC_ADVANCE_RIP_AND_FINISH();
5186 IEM_MC_END();
5187 }
5188 }
5189}
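
/*
 * In short: one opcode with two store forms selected by VEX.W, roughly
 * (sketch, using a hypothetical xmm source variable):
 *
 *     if (fVexW)  uValue64 = xmm.au64[0];   // vmovq Eq,Vq (64-bit mode only)
 *     else        uValue32 = xmm.au32[0];   // vmovd Ed,Vd
 */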
5190
5191
5192/**
5193 * @opcode 0x7e
5194 * @oppfx 0xf3
5195 * @opcpuid avx
5196 * @opgroup og_avx_pcksclr_datamove
5197 * @opxcpttype none
5198 * @optest op1=1 op2=2 -> op1=2
5199 * @optest op1=0 op2=-42 -> op1=-42
5200 */
5201FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
5202{
5203 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5204 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5205 if (IEM_IS_MODRM_REG_MODE(bRm))
5206 {
5207 /*
5208 * Register, register.
5209 */
5210 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5211 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5212
5213 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5214 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5215
5216 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
5217 IEM_GET_MODRM_RM(pVCpu, bRm));
5218 IEM_MC_ADVANCE_RIP_AND_FINISH();
5219 IEM_MC_END();
5220 }
5221 else
5222 {
5223 /*
5224 * Memory, register.
5225 */
5226 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5227 IEM_MC_LOCAL(uint64_t, uSrc);
5228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5229
5230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5231 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5232 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5233 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5234
5235 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5236 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5237
5238 IEM_MC_ADVANCE_RIP_AND_FINISH();
5239 IEM_MC_END();
5240 }
5241}
5242
5243/* Opcode VEX.F2.0F 0x7e - invalid */
5244
5245
5246/* Opcode VEX.0F 0x7f - invalid */
5247
5248/**
5249 * @opcode 0x7f
5250 * @oppfx 0x66
5251 * @opcpuid avx
5252 * @opgroup og_avx_simdint_datamove
5253 * @opxcpttype 1
5254 * @optest op1=1 op2=2 -> op1=2
5255 * @optest op1=0 op2=-42 -> op1=-42
5256 */
5257FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
5258{
5259 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5260 Assert(pVCpu->iem.s.uVexLength <= 1);
5261 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5262 if (IEM_IS_MODRM_REG_MODE(bRm))
5263 {
5264 /*
5265 * Register, register.
5266 */
5267 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5268 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5269
5270 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5271 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5272 if (pVCpu->iem.s.uVexLength == 0)
5273 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5274 IEM_GET_MODRM_REG(pVCpu, bRm));
5275 else
5276 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5277 IEM_GET_MODRM_REG(pVCpu, bRm));
5278 IEM_MC_ADVANCE_RIP_AND_FINISH();
5279 IEM_MC_END();
5280 }
5281 else if (pVCpu->iem.s.uVexLength == 0)
5282 {
5283 /*
5284 * Register, memory128.
5285 */
5286 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5287 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5288 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5289
5290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5291 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5292 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5293 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5294
5295 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5296 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5297
5298 IEM_MC_ADVANCE_RIP_AND_FINISH();
5299 IEM_MC_END();
5300 }
5301 else
5302 {
5303 /*
5304 * Register, memory256.
5305 */
5306 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5307 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5309
5310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5311 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5312 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5313 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5314
5315 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5316 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5317
5318 IEM_MC_ADVANCE_RIP_AND_FINISH();
5319 IEM_MC_END();
5320 }
5321}
5322
5323
5324/**
5325 * @opcode 0x7f
5326 * @oppfx 0xf3
5327 * @opcpuid avx
5328 * @opgroup og_avx_simdint_datamove
5329 * @opxcpttype 4UA
5330 * @optest op1=1 op2=2 -> op1=2
5331 * @optest op1=0 op2=-42 -> op1=-42
5332 */
5333FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
5334{
5335 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5336 Assert(pVCpu->iem.s.uVexLength <= 1);
5337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5338 if (IEM_IS_MODRM_REG_MODE(bRm))
5339 {
5340 /*
5341 * Register, register.
5342 */
5343 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5344 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5345
5346 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5347 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5348 if (pVCpu->iem.s.uVexLength == 0)
5349 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5350 IEM_GET_MODRM_REG(pVCpu, bRm));
5351 else
5352 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5353 IEM_GET_MODRM_REG(pVCpu, bRm));
5354 IEM_MC_ADVANCE_RIP_AND_FINISH();
5355 IEM_MC_END();
5356 }
5357 else if (pVCpu->iem.s.uVexLength == 0)
5358 {
5359 /*
5360 * Register, memory128.
5361 */
5362 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5363 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5364 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5365
5366 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5367 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5368 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5369 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5370
5371 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5372 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5373
5374 IEM_MC_ADVANCE_RIP_AND_FINISH();
5375 IEM_MC_END();
5376 }
5377 else
5378 {
5379 /*
5380 * Register, memory256.
5381 */
5382 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5383 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5385
5386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5387 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5388 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5389 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5390
5391 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5392 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5393
5394 IEM_MC_ADVANCE_RIP_AND_FINISH();
5395 IEM_MC_END();
5396 }
5397}
5398
5399/* Opcode VEX.F2.0F 0x7f - invalid */
5400
5401
5402/* Opcode VEX.0F 0x80 - invalid */
5403/* Opcode VEX.0F 0x81 - invalid */
5404/* Opcode VEX.0F 0x82 - invalid */
5405/* Opcode VEX.0F 0x83 - invalid */
5406/* Opcode VEX.0F 0x84 - invalid */
5407/* Opcode VEX.0F 0x85 - invalid */
5408/* Opcode VEX.0F 0x86 - invalid */
5409/* Opcode VEX.0F 0x87 - invalid */
5410/* Opcode VEX.0F 0x88 - invalid */
5411/* Opcode VEX.0F 0x89 - invalid */
5412/* Opcode VEX.0F 0x8a - invalid */
5413/* Opcode VEX.0F 0x8b - invalid */
5414/* Opcode VEX.0F 0x8c - invalid */
5415/* Opcode VEX.0F 0x8d - invalid */
5416/* Opcode VEX.0F 0x8e - invalid */
5417/* Opcode VEX.0F 0x8f - invalid */
5418/* Opcode VEX.0F 0x90 - invalid */
5419/* Opcode VEX.0F 0x91 - invalid */
5420/* Opcode VEX.0F 0x92 - invalid */
5421/* Opcode VEX.0F 0x93 - invalid */
5422/* Opcode VEX.0F 0x94 - invalid */
5423/* Opcode VEX.0F 0x95 - invalid */
5424/* Opcode VEX.0F 0x96 - invalid */
5425/* Opcode VEX.0F 0x97 - invalid */
5426/* Opcode VEX.0F 0x98 - invalid */
5427/* Opcode VEX.0F 0x99 - invalid */
5428/* Opcode VEX.0F 0x9a - invalid */
5429/* Opcode VEX.0F 0x9b - invalid */
5430/* Opcode VEX.0F 0x9c - invalid */
5431/* Opcode VEX.0F 0x9d - invalid */
5432/* Opcode VEX.0F 0x9e - invalid */
5433/* Opcode VEX.0F 0x9f - invalid */
5434/* Opcode VEX.0F 0xa0 - invalid */
5435/* Opcode VEX.0F 0xa1 - invalid */
5436/* Opcode VEX.0F 0xa2 - invalid */
5437/* Opcode VEX.0F 0xa3 - invalid */
5438/* Opcode VEX.0F 0xa4 - invalid */
5439/* Opcode VEX.0F 0xa5 - invalid */
5440/* Opcode VEX.0F 0xa6 - invalid */
5441/* Opcode VEX.0F 0xa7 - invalid */
5442/* Opcode VEX.0F 0xa8 - invalid */
5443/* Opcode VEX.0F 0xa9 - invalid */
5444/* Opcode VEX.0F 0xaa - invalid */
5445/* Opcode VEX.0F 0xab - invalid */
5446/* Opcode VEX.0F 0xac - invalid */
5447/* Opcode VEX.0F 0xad - invalid */
5448
5449
5450/* Opcode VEX.0F 0xae mem/0 - invalid. */
5451/* Opcode VEX.0F 0xae mem/1 - invalid. */
5452
5453/**
5454 * @ opmaps grp15
5455 * @ opcode !11/2
5456 * @ oppfx none
5457 * @ opcpuid sse
5458 * @ opgroup og_sse_mxcsrsm
5459 * @ opxcpttype 5
5460 * @ optest op1=0 -> mxcsr=0
5461 * @ optest op1=0x2083 -> mxcsr=0x2083
5462 * @ optest op1=0xfffffffe -> value.xcpt=0xd
5463 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5464 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5465 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5466 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5467 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5468 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5469 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5470 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5471 */
5472FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
5473//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
5474//{
5475// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5476// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5477// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5478// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5479// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5480// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5481// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5482// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5483// IEM_MC_END();
5484// return VINF_SUCCESS;
5485//}
5486
5487
5488/**
5489 * @opmaps vexgrp15
5490 * @opcode !11/3
5491 * @oppfx none
5492 * @opcpuid avx
5493 * @opgroup og_avx_mxcsrsm
5494 * @opxcpttype 5
5495 * @optest mxcsr=0 -> op1=0
5496 * @optest mxcsr=0x2083 -> op1=0x2083
5497 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5498 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
5499 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5500 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5501 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
5502 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
5503 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
5504 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
5505 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5506 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
5507 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5508 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
5509 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5510 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
5511 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5512 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
5513 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
5514 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
5515 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
5516 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
5517 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
5518 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
5519 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
5520 * -> value.xcpt=0x6
5521 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
5522 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
5523 * APMv4 rev 3.17 page 509.
5524 * @todo Test this instruction on AMD Ryzen.
5525 */
5526FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
5527{
5528 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5529 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5530 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5532 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5534 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5535 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
5536 IEM_MC_END();
5537}
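
/*
 * The cimpl worker boils down to a guarded 32-bit store of MXCSR; a sketch
 * with approximate field/function names (the real helper raises the relevant
 * exceptions first):
 *
 *     uint32_t const uMxCsr = pVCpu->cpum.GstCtx.XState.x87.MXCSR;
 *     rcStrict = iemMemStoreDataU32(pVCpu, iEffSeg, GCPtrEff, uMxCsr);
 */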
5538
5539/* Opcode VEX.0F 0xae mem/4 - invalid. */
5540/* Opcode VEX.0F 0xae mem/5 - invalid. */
5541/* Opcode VEX.0F 0xae mem/6 - invalid. */
5542/* Opcode VEX.0F 0xae mem/7 - invalid. */
5543
5544/* Opcode VEX.0F 0xae 11b/0 - invalid. */
5545/* Opcode VEX.0F 0xae 11b/1 - invalid. */
5546/* Opcode VEX.0F 0xae 11b/2 - invalid. */
5547/* Opcode VEX.0F 0xae 11b/3 - invalid. */
5548/* Opcode VEX.0F 0xae 11b/4 - invalid. */
5549/* Opcode VEX.0F 0xae 11b/5 - invalid. */
5550/* Opcode VEX.0F 0xae 11b/6 - invalid. */
5551/* Opcode VEX.0F 0xae 11b/7 - invalid. */
5552
5553/**
5554 * Vex group 15 jump table for memory variant.
5555 */
5556IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
5557{ /* pfx: none, 066h, 0f3h, 0f2h */
5558 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5559 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5560 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5561 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5562 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5563 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5564 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5565 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5566};
5567AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
5568
5569
5570/** Opcode VEX.0F 0xae. */
5571FNIEMOP_DEF(iemOp_VGrp15)
5572{
5573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5574 if (IEM_IS_MODRM_REG_MODE(bRm))
5575 /* register, register */
5576 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
5577
5578 /* memory, register */
5579 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5580 + pVCpu->iem.s.idxPrefix], bRm);
5581}
5582
5583
5584/* Opcode VEX.0F 0xaf - invalid. */
5585
5586/* Opcode VEX.0F 0xb0 - invalid. */
5587/* Opcode VEX.0F 0xb1 - invalid. */
5588/* Opcode VEX.0F 0xb2 - invalid. */
5589/* Opcode VEX.0F 0xb2 - invalid. */
5590/* Opcode VEX.0F 0xb3 - invalid. */
5591/* Opcode VEX.0F 0xb4 - invalid. */
5592/* Opcode VEX.0F 0xb5 - invalid. */
5593/* Opcode VEX.0F 0xb6 - invalid. */
5594/* Opcode VEX.0F 0xb7 - invalid. */
5595/* Opcode VEX.0F 0xb8 - invalid. */
5596/* Opcode VEX.0F 0xb9 - invalid. */
5597/* Opcode VEX.0F 0xba - invalid. */
5598/* Opcode VEX.0F 0xbb - invalid. */
5599/* Opcode VEX.0F 0xbc - invalid. */
5600/* Opcode VEX.0F 0xbd - invalid. */
5601/* Opcode VEX.0F 0xbe - invalid. */
5602/* Opcode VEX.0F 0xbf - invalid. */
5603
5604/* Opcode VEX.0F 0xc0 - invalid. */
5605/* Opcode VEX.66.0F 0xc0 - invalid. */
5606/* Opcode VEX.F3.0F 0xc0 - invalid. */
5607/* Opcode VEX.F2.0F 0xc0 - invalid. */
5608
5609/* Opcode VEX.0F 0xc1 - invalid. */
5610/* Opcode VEX.66.0F 0xc1 - invalid. */
5611/* Opcode VEX.F3.0F 0xc1 - invalid. */
5612/* Opcode VEX.F2.0F 0xc1 - invalid. */
5613
5614#define IEMOP_VCMPP_BODY(a_Instr) \
5615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5616 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5617 { \
5618 /* \
5619 * Register, Register. \
5620 */ \
5621 if (pVCpu->iem.s.uVexLength) \
5622 { \
5623 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5624 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5625 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5626 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5627 IEM_MC_PREPARE_AVX_USAGE(); \
5628 IEM_MC_LOCAL(X86YMMREG, uDst); \
5629 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5630 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5631 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5632 IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5633 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5634 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5635 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5636 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5637 puDst, puSrc, bImmArg); \
5638 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5639 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5640 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5641 IEM_MC_END(); \
5642 } \
5643 else \
5644 { \
5645 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5646 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5647 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5648 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5649 IEM_MC_PREPARE_AVX_USAGE(); \
5650 IEM_MC_LOCAL(X86XMMREG, uDst); \
5651 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5652 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5653 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5654 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5655 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5656 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5657 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5658 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5659 puDst, puSrc, bImmArg); \
5660 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5661 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5662 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5663 IEM_MC_END(); \
5664 } \
5665 } \
5666 else \
5667 { \
5668 /* \
5669 * Register, Memory. \
5670 */ \
5671 if (pVCpu->iem.s.uVexLength) \
5672 { \
5673 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5676 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5677 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5678 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5679 IEM_MC_PREPARE_AVX_USAGE(); \
5680 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5681 IEM_MC_LOCAL(X86YMMREG, uDst); \
5682 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5683 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5684 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5685 IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5686 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5687 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5688 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5689 puDst, puSrc, bImmArg); \
5690 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5691 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5692 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5693 IEM_MC_END(); \
5694 } \
5695 else \
5696 { \
5697 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5700 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5701 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5702 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5703 IEM_MC_PREPARE_AVX_USAGE(); \
5704 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5705 IEM_MC_LOCAL(X86XMMREG, uDst); \
5706 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5707 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5708 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5709 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5710 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5711 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5712 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5713 puDst, puSrc, bImmArg); \
5714 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5715 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5716 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5717 IEM_MC_END(); \
5718 } \
5719 } \
5720 (void)0
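
/*
 * For reference, the imm8 of vcmpps/vcmppd selects one of 32 predicates and
 * each element becomes an all-ones/all-zero mask. A sketch of one single
 * precision lane for the first few predicates (full NaN handling elided):
 *
 *     bool fRes;
 *     switch (bImm & 0x1f)
 *     {
 *         case 0: fRes = r32Src1 == r32Src2; break;           // EQ_OQ
 *         case 1: fRes = r32Src1 <  r32Src2; break;           // LT_OS
 *         case 2: fRes = r32Src1 <= r32Src2; break;           // LE_OS
 *         case 3: fRes = fSrc1IsNan || fSrc2IsNan; break;     // UNORD_Q
 *         // ... predicates 4..31 continue in the same pattern.
 *     }
 *     puDst->au32[iLane] = fRes ? UINT32_MAX : 0;
 */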
5721
5722
5723/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
5724FNIEMOP_DEF(iemOp_vcmpps_Vps_Hps_Wps_Ib)
5725{
5726 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPS, vcmpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5727 IEMOP_VCMPP_BODY(vcmpps);
5728}
5729
5730
5731/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
5732FNIEMOP_DEF(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib)
5733{
5734 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPD, vcmppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5735 IEMOP_VCMPP_BODY(vcmppd);
5736}
5737
5738
5739/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
5740FNIEMOP_DEF(iemOp_vcmpss_Vss_Hss_Wss_Ib)
5741{
5742 IEMOP_MNEMONIC4(VEX_RVMI, CMPSS, cmpss, Vss, Hss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5743
5744 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5745 if (IEM_IS_MODRM_REG_MODE(bRm))
5746 {
5747 /*
5748 * XMM32, XMM32.
5749 */
5750 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5751 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5752 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5753 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5754 IEM_MC_PREPARE_AVX_USAGE();
5755 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5756 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5757 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
5758 IEM_MC_LOCAL(X86XMMREG, uDst);
5759 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5760 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5761 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5762 puDst, puSrc, bImmArg);
5763 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5764 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5765
5766 IEM_MC_ADVANCE_RIP_AND_FINISH();
5767 IEM_MC_END();
5768 }
5769 else
5770 {
5771 /*
5772 * XMM32, [mem32].
5773 */
5774 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5775
5776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5778 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5779 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5780 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5781 IEM_MC_PREPARE_AVX_USAGE();
5782
5783 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5784 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5785 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
5786 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5787 IEM_MC_LOCAL(X86XMMREG, uDst);
5788 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5789 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5790 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5791 puDst, puSrc, bImmArg);
5792 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5793 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5794
5795 IEM_MC_ADVANCE_RIP_AND_FINISH();
5796 IEM_MC_END();
5797 }
5798}
5799
5800
5801/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
5802FNIEMOP_DEF(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib)
5803{
5804 IEMOP_MNEMONIC4(VEX_RVMI, CMPSD, cmpsd, Vsd, Hsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5805
5806 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5807 if (IEM_IS_MODRM_REG_MODE(bRm))
5808 {
5809 /*
5810 * XMM64, XMM64.
5811 */
5812 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5813 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5814 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5815 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5816 IEM_MC_PREPARE_AVX_USAGE();
5817 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5818 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5819 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
5820 IEM_MC_LOCAL(X86XMMREG, uDst);
5821 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5822 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5823 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5824 puDst, puSrc, bImmArg);
5825 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5826 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5827
5828 IEM_MC_ADVANCE_RIP_AND_FINISH();
5829 IEM_MC_END();
5830 }
5831 else
5832 {
5833 /*
5834 * XMM64, [mem64].
5835 */
5836 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5837
5838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5840 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5841 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5842 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5843 IEM_MC_PREPARE_AVX_USAGE();
5844
5845 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5846 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5847 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
5848 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5849 IEM_MC_LOCAL(X86XMMREG, uDst);
5850 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5851 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5852 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5853 puDst, puSrc, bImmArg);
5854 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5855 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5856
5857 IEM_MC_ADVANCE_RIP_AND_FINISH();
5858 IEM_MC_END();
5859 }
5860}
5861
5862
5863/* Opcode VEX.0F 0xc3 - invalid */
5864/* Opcode VEX.66.0F 0xc3 - invalid */
5865/* Opcode VEX.F3.0F 0xc3 - invalid */
5866/* Opcode VEX.F2.0F 0xc3 - invalid */
5867
5868/* Opcode VEX.0F 0xc4 - invalid */
5869
5870
5871/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
5872FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
5873{
5874 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
5875 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5876 if (IEM_IS_MODRM_REG_MODE(bRm))
5877 {
5878 /*
5879 * Register, register.
5880 */
5881 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5882 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5883 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5884 IEM_MC_LOCAL(uint16_t, uValue);
5885
5886 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5887 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5888 IEM_MC_PREPARE_AVX_USAGE();
5889
5890 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5891 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
5892 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5893 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5894 IEM_MC_ADVANCE_RIP_AND_FINISH();
5895 IEM_MC_END();
5896 }
5897 else
5898 {
5899 /*
5900 * Register, memory.
5901 */
5902 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5904 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5905 IEM_MC_LOCAL(uint16_t, uValue);
5906
5907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5908 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5909 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5910 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5911 IEM_MC_PREPARE_AVX_USAGE();
5912
5913 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5914 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5915 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5916 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5917 IEM_MC_ADVANCE_RIP_AND_FINISH();
5918 IEM_MC_END();
5919 }
5920}
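
/*
 * Element-wise, the above amounts to copying the first source and patching
 * one word (sketch):
 *
 *     uDst = uSrc1;
 *     uDst.au16[bImm & 7] = uValue;   // uValue: GPR word or [mem16]
 */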
5921
5922
5923/* Opcode VEX.F3.0F 0xc4 - invalid */
5924/* Opcode VEX.F2.0F 0xc4 - invalid */
5925
5926/* Opcode VEX.0F 0xc5 - invalid */
5927
5928
5929/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
5930FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
5931{
5932 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
5933 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5934 if (IEM_IS_MODRM_REG_MODE(bRm))
5935 {
5936 /*
5937 * greg32, XMM, imm8.
5938 */
5939 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5940 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5941 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5942 IEM_MC_LOCAL(uint16_t, uValue);
5943 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5944 IEM_MC_PREPARE_AVX_USAGE();
5945 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
5946 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
5947 IEM_MC_ADVANCE_RIP_AND_FINISH();
5948 IEM_MC_END();
5949 }
5950 /* No memory operand. */
5951 else
5952 IEMOP_RAISE_INVALID_OPCODE_RET();
5953}
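
/*
 * Sketch of the extraction: the selected word is zero-extended into the
 * destination GPR:
 *
 *     uint16_t const uWord = uSrc.au16[bImm & 7];
 *     uGreg32 = uWord;   // bits 31:16 zero; bits 63:32 cleared in 64-bit mode
 */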
5954
5955
5956/* Opcode VEX.F3.0F 0xc5 - invalid */
5957/* Opcode VEX.F2.0F 0xc5 - invalid */
5958
5959
5960#define VSHUFP_X(a_Instr) \
5961 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5962 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5963 { \
5964 /* \
5965 * Register, register. \
5966 */ \
5967 if (pVCpu->iem.s.uVexLength) \
5968 { \
5969 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5970 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5971 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
5972 IEM_MC_LOCAL(RTUINT256U, uDst); \
5973 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
5974 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
5975 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
5976 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
5977 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
5978 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5979 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5980 IEM_MC_PREPARE_AVX_USAGE(); \
5981 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5982 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5983 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
5984 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5985 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5986 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5987 IEM_MC_END(); \
5988 } \
5989 else \
5990 { \
5991 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5992 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5993 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5994 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
5995 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
5996 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
5997 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5998 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5999 IEM_MC_PREPARE_AVX_USAGE(); \
6000 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6001 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6002 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6003 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
6004 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6005 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
6006 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6007 IEM_MC_END(); \
6008 } \
6009 } \
6010 else \
6011 { \
6012 /* \
6013 * Register, memory. \
6014 */ \
6015 if (pVCpu->iem.s.uVexLength) \
6016 { \
6017 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6018 IEM_MC_LOCAL(RTUINT256U, uDst); \
6019 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
6020 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
6021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
6022 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
6023 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
6024 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
6025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
6026 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6027 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6028 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
6029 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6030 IEM_MC_PREPARE_AVX_USAGE(); \
6031 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
6032 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6033 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
6034 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6035 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
6036 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6037 IEM_MC_END(); \
6038 } \
6039 else \
6040 { \
6041 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6042 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
6043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
6044 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
6045 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
6046 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
6047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
6048 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6049 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6050 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
6051 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6052 IEM_MC_PREPARE_AVX_USAGE(); \
6053 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
6054 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6055 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6056 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
6057 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6058 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
6059 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6060 IEM_MC_END(); \
6061 } \
6062 } \
6063 (void)0
6064
6065/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
6066FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
6067{
6068 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
6069 VSHUFP_X(vshufps);
6070}
6071
6072
6073/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6074FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
6075{
6076 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
6077 VSHUFP_X(vshufpd);
6078}
6079#undef VSHUFP_X
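
/*
 * vshufps selection per 128-bit lane, for reference (the 256-bit form
 * repeats this for the upper lane; vshufpd uses one imm8 bit per qword):
 *
 *     puDst->au32[0] = puSrc1->au32[ bImm       & 3];
 *     puDst->au32[1] = puSrc1->au32[(bImm >> 2) & 3];
 *     puDst->au32[2] = puSrc2->au32[(bImm >> 4) & 3];
 *     puDst->au32[3] = puSrc2->au32[(bImm >> 6) & 3];
 */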
6080
6081
6082/* Opcode VEX.F3.0F 0xc6 - invalid */
6083/* Opcode VEX.F2.0F 0xc6 - invalid */
6084
6085/* Opcode VEX.0F 0xc7 - invalid */
6086/* Opcode VEX.66.0F 0xc7 - invalid */
6087/* Opcode VEX.F3.0F 0xc7 - invalid */
6088/* Opcode VEX.F2.0F 0xc7 - invalid */
6089
6090/* Opcode VEX.0F 0xc8 - invalid */
6091/* Opcode VEX.0F 0xc9 - invalid */
6092/* Opcode VEX.0F 0xca - invalid */
6093/* Opcode VEX.0F 0xcb - invalid */
6094/* Opcode VEX.0F 0xcc - invalid */
6095/* Opcode VEX.0F 0xcd - invalid */
6096/* Opcode VEX.0F 0xce - invalid */
6097/* Opcode VEX.0F 0xcf - invalid */
6098
6099
6100/* Opcode VEX.0F 0xd0 - invalid */
6101
6102
6103/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
6104FNIEMOP_DEF(iemOp_vaddsubpd_Vpd_Hpd_Wpd)
6105{
6106 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPD, vaddsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6107 IEMOPMEDIAF3_INIT_VARS( vaddsubpd);
6108 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6109}
6110
6111
6112/* Opcode VEX.F3.0F 0xd0 - invalid */
6113
6114
6115/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
6116FNIEMOP_DEF(iemOp_vaddsubps_Vps_Hps_Wps)
6117{
6118 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPS, vaddsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6119 IEMOPMEDIAF3_INIT_VARS( vaddsubps);
6120 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6121}
6122
6123
6124/* Opcode VEX.0F 0xd1 - invalid */
6125
6126
6127/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
6128FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
6129{
6130 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6131 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
6132 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6133}
6134
6135/* Opcode VEX.F3.0F 0xd1 - invalid */
6136/* Opcode VEX.F2.0F 0xd1 - invalid */
6137
6138/* Opcode VEX.0F 0xd2 - invalid */
6139/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
6140FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
6141{
6142 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6143 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
6144 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6145}
6146
6147/* Opcode VEX.F3.0F 0xd2 - invalid */
6148/* Opcode VEX.F2.0F 0xd2 - invalid */
6149
6150/* Opcode VEX.0F 0xd3 - invalid */
6151/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
6152FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
6153{
6154 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6155 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
6156 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6157}
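
/*
 * Shared shift semantics for vpsrlw/vpsrld/vpsrlq: the count is the low
 * qword of the second source and saturates rather than wraps. Sketch for
 * the qword variant:
 *
 *     uint64_t const cShift = puSrc2->au64[0];
 *     puDst->au64[i] = cShift < 64 ? puSrc1->au64[i] >> cShift : 0;
 */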
6158
6159/* Opcode VEX.F3.0F 0xd3 - invalid */
6160/* Opcode VEX.F2.0F 0xd3 - invalid */
6161
6162/* Opcode VEX.0F 0xd4 - invalid */
6163
6164
6165/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
6166FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
6167{
6168 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6169 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
6170 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6171}
6172
6173
6174/* Opcode VEX.F3.0F 0xd4 - invalid */
6175/* Opcode VEX.F2.0F 0xd4 - invalid */
6176
6177/* Opcode VEX.0F 0xd5 - invalid */
6178
6179
6180/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
6181FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
6182{
6183 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6184 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
6185 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6186}
6187
6188
6189/* Opcode VEX.F3.0F 0xd5 - invalid */
6190/* Opcode VEX.F2.0F 0xd5 - invalid */
6191
6192/* Opcode VEX.0F 0xd6 - invalid */
6193
6194/**
6195 * @opcode 0xd6
6196 * @oppfx 0x66
6197 * @opcpuid avx
6198 * @opgroup og_avx_pcksclr_datamove
6199 * @opxcpttype none
6200 * @optest op1=-1 op2=2 -> op1=2
6201 * @optest op1=0 op2=-42 -> op1=-42
6202 */
6203FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
6204{
6205 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
6206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6207 if (IEM_IS_MODRM_REG_MODE(bRm))
6208 {
6209 /*
6210 * Register, register.
6211 */
6212 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6213 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6214
6215 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6216 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6217
6218 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
6219 IEM_GET_MODRM_REG(pVCpu, bRm));
6220 IEM_MC_ADVANCE_RIP_AND_FINISH();
6221 IEM_MC_END();
6222 }
6223 else
6224 {
6225 /*
6226 * Memory, register.
6227 */
6228 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6229 IEM_MC_LOCAL(uint64_t, uSrc);
6230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6231
6232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6233 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6234 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6235 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6236
6237 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
6238 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6239
6240 IEM_MC_ADVANCE_RIP_AND_FINISH();
6241 IEM_MC_END();
6242 }
6243}
6244
6245/* Opcode VEX.F3.0F 0xd6 - invalid */
6246/* Opcode VEX.F2.0F 0xd6 - invalid */
6247
6248
6249/* Opcode VEX.0F 0xd7 - invalid */
6250
6251/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
6252FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
6253{
6254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6255 /* Docs say register only. */
6256 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
6257 {
6258 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
6259 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
6260 if (pVCpu->iem.s.uVexLength)
6261 {
6262 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6263 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
6264 IEM_MC_ARG(uint64_t *, puDst, 0);
6265 IEM_MC_LOCAL(RTUINT256U, uSrc);
6266 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
6267 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6268 IEM_MC_PREPARE_AVX_USAGE();
6269 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6270 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6271 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
6272 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
6273 IEM_MC_ADVANCE_RIP_AND_FINISH();
6274 IEM_MC_END();
6275 }
6276 else
6277 {
6278 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6279 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6280 IEM_MC_ARG(uint64_t *, puDst, 0);
6281 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6282 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6283 IEM_MC_PREPARE_AVX_USAGE();
6284 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6285 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6286 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
6287 IEM_MC_ADVANCE_RIP_AND_FINISH();
6288 IEM_MC_END();
6289 }
6290 }
6291 else
6292 IEMOP_RAISE_INVALID_OPCODE_RET();
6293}
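
/*
 * Sketch of the mask extraction: bit i of the destination is the sign bit
 * of source byte i (16 or 32 bits worth), the rest of the GPR is zeroed:
 *
 *     uint32_t fMask = 0;
 *     for (unsigned i = 0; i < cbVec; i++)
 *         fMask |= (uint32_t)(puSrc->au8[i] >> 7) << i;
 *     *puDst = fMask;
 */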
6294
6295
6296/* Opcode VEX.F3.0F 0xd7 - invalid */
6297/* Opcode VEX.F2.0F 0xd7 - invalid */
6298
6299
6300/* Opcode VEX.0F 0xd8 - invalid */
6301
6302/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
6303FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
6304{
6305 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6306 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
6307 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6308}
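
/*
 * Unsigned saturating subtract, per byte (sketch; vpsubusw at 0xd9 is the
 * same per word):
 *
 *     puDst->au8[i] = puSrc1->au8[i] > puSrc2->au8[i]
 *                   ? puSrc1->au8[i] - puSrc2->au8[i] : 0;
 */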
6309
6310
6311/* Opcode VEX.F3.0F 0xd8 - invalid */
6312/* Opcode VEX.F2.0F 0xd8 - invalid */
6313
6314/* Opcode VEX.0F 0xd9 - invalid */
6315
6316
6317/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
6318FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
6319{
6320 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6321 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
6322 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6323}
6324
6325
6326/* Opcode VEX.F3.0F 0xd9 - invalid */
6327/* Opcode VEX.F2.0F 0xd9 - invalid */
6328
6329/* Opcode VEX.0F 0xda - invalid */
6330
6331
6332/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
6333FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
6334{
6335 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6336 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
6337 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6338}
6339
6340
6341/* Opcode VEX.F3.0F 0xda - invalid */
6342/* Opcode VEX.F2.0F 0xda - invalid */
6343
6344/* Opcode VEX.0F 0xdb - invalid */
6345
6346
6347/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
6348FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
6349{
6350 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6351 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6352 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
6353}
6354
6355
6356/* Opcode VEX.F3.0F 0xdb - invalid */
6357/* Opcode VEX.F2.0F 0xdb - invalid */
6358
6359/* Opcode VEX.0F 0xdc - invalid */
6360
6361
6362/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
6363FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
6364{
6365 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6366 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
6367 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6368}
6369
6370
6371/* Opcode VEX.F3.0F 0xdc - invalid */
6372/* Opcode VEX.F2.0F 0xdc - invalid */
6373
6374/* Opcode VEX.0F 0xdd - invalid */
6375
6376
6377/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
6378FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
6379{
6380 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6381 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
6382 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6383}
6384
6385
6386/* Opcode VEX.F3.0F 0xdd - invalid */
6387/* Opcode VEX.F2.0F 0xdd - invalid */
6388
6389/* Opcode VEX.0F 0xde - invalid */
6390
6391
6392/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
6393FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
6394{
6395 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6396 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
6397 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6398}
6399
6400
6401/* Opcode VEX.F3.0F 0xde - invalid */
6402/* Opcode VEX.F2.0F 0xde - invalid */
6403
6404/* Opcode VEX.0F 0xdf - invalid */
6405
6406
6407/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
6408FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
6409{
6410 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6411 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6412 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
6413}
6414
6415
6416/* Opcode VEX.F3.0F 0xdf - invalid */
6417/* Opcode VEX.F2.0F 0xdf - invalid */
6418
6419/* Opcode VEX.0F 0xe0 - invalid */
6420
6421
6422/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
6423FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
6424{
6425 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6426 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
6427 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6428}
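
/*
 * Rounding unsigned average, per byte (sketch; vpavgw at 0xe3 is the word
 * variant):
 *
 *     puDst->au8[i] = (uint8_t)(((unsigned)puSrc1->au8[i] + puSrc2->au8[i] + 1) >> 1);
 */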
6429
6430
6431/* Opcode VEX.F3.0F 0xe0 - invalid */
6432/* Opcode VEX.F2.0F 0xe0 - invalid */
6433
6434/* Opcode VEX.0F 0xe1 - invalid */
6435/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
6436FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
6437{
6438 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6439 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
6440 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6441}
6442
6443/* Opcode VEX.F3.0F 0xe1 - invalid */
6444/* Opcode VEX.F2.0F 0xe1 - invalid */
6445
6446/* Opcode VEX.0F 0xe2 - invalid */
6447/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
6448FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
6449{
6450 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6451 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
6452 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6453}
6454
6455/* Opcode VEX.F3.0F 0xe2 - invalid */
6456/* Opcode VEX.F2.0F 0xe2 - invalid */
6457
6458/* Opcode VEX.0F 0xe3 - invalid */
6459
6460
6461/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
6462FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
6463{
6464 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6465 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
6466 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6467}
6468
6469
6470/* Opcode VEX.F3.0F 0xe3 - invalid */
6471/* Opcode VEX.F2.0F 0xe3 - invalid */
6472
6473/* Opcode VEX.0F 0xe4 - invalid */
6474
6475
6476/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
6477FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
6478{
6479 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6480 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
6481 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6482}
6483
6484
6485/* Opcode VEX.F3.0F 0xe4 - invalid */
6486/* Opcode VEX.F2.0F 0xe4 - invalid */
6487
6488/* Opcode VEX.0F 0xe5 - invalid */
6489
6490
6491/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
6492FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
6493{
6494 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6495 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
6496 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6497}
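
/*
 * Illustrative sketch only: vpmulhw keeps the high 16 bits of each signed
 * 16x16-bit product.  Plain C with an arithmetic right shift assumed; the
 * function name is made up for this example.
 */
#if 0
static int16_t iemExampleMulHiS16(int16_t iLeft, int16_t iRight)
{
    return (int16_t)(((int32_t)iLeft * iRight) >> 16); /* high half only */
}
#endif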
6498
6499
6500/* Opcode VEX.F3.0F 0xe5 - invalid */
6501/* Opcode VEX.F2.0F 0xe5 - invalid */
6502
6503/* Opcode VEX.0F 0xe6 - invalid */
6504/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
6505FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
6506/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
6507FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
6508/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
6509FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
6510
6511
6512/* Opcode VEX.0F 0xe7 - invalid */
6513
6514/**
6515 * @opcode 0xe7
6516 * @opcodesub !11 mr/reg
6517 * @oppfx 0x66
6518 * @opcpuid avx
6519 * @opgroup og_avx_cachect
6520 * @opxcpttype 1
6521 * @optest op1=-1 op2=2 -> op1=2
6522 * @optest op1=0 op2=-42 -> op1=-42
6523 */
6524FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
6525{
6526 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6527 Assert(pVCpu->iem.s.uVexLength <= 1);
6528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6529 if (IEM_IS_MODRM_MEM_MODE(bRm))
6530 {
6531 if (pVCpu->iem.s.uVexLength == 0)
6532 {
6533 /*
6534 * 128-bit: Memory, register.
6535 */
6536 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6537 IEM_MC_LOCAL(RTUINT128U, uSrc);
6538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6539
6540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6541 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6542 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6543 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6544
6545 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
6546 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6547
6548 IEM_MC_ADVANCE_RIP_AND_FINISH();
6549 IEM_MC_END();
6550 }
6551 else
6552 {
6553 /*
6554 * 256-bit: Memory, register.
6555 */
6556 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6557 IEM_MC_LOCAL(RTUINT256U, uSrc);
6558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6559
6560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6561 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6562 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6563 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6564
6565 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6566 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6567
6568 IEM_MC_ADVANCE_RIP_AND_FINISH();
6569 IEM_MC_END();
6570 }
6571 }
6572 /**
6573 * @opdone
6574 * @opmnemonic udvex660fe7reg
6575 * @opcode 0xe7
6576 * @opcodesub 11 mr/reg
6577 * @oppfx 0x66
6578 * @opunused immediate
6579 * @opcpuid avx
6580 * @optest ->
6581 */
6582 else
6583 IEMOP_RAISE_INVALID_OPCODE_RET();
6584}
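
/* Note: vmovntdq is a non-temporal (streaming) store.  The emulation above
   performs it as an ordinary aligned store, which is architecturally fine
   since the non-temporal hint only affects cache allocation behaviour. */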
6585
6586/* Opcode VEX.F3.0F 0xe7 - invalid */
6587/* Opcode VEX.F2.0F 0xe7 - invalid */
6588
6589
6590/* Opcode VEX.0F 0xe8 - invalid */
6591
6592
6593/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
6594FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
6595{
6596 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6597 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
6598 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6599}
6600
6601
6602/* Opcode VEX.F3.0F 0xe8 - invalid */
6603/* Opcode VEX.F2.0F 0xe8 - invalid */
6604
6605/* Opcode VEX.0F 0xe9 - invalid */
6606
6607
6608/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
6609FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
6610{
6611 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6612 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
6613 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6614}
6615
6616
6617/* Opcode VEX.F3.0F 0xe9 - invalid */
6618/* Opcode VEX.F2.0F 0xe9 - invalid */
6619
6620/* Opcode VEX.0F 0xea - invalid */
6621
6622
6623/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
6624FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
6625{
6626 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6627 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
6628 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6629}
6630
6631
6632/* Opcode VEX.F3.0F 0xea - invalid */
6633/* Opcode VEX.F2.0F 0xea - invalid */
6634
6635/* Opcode VEX.0F 0xeb - invalid */
6636
6637
6638/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
6639FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
6640{
6641 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6642 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6643 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
6644}
6645
6646
6647
6648/* Opcode VEX.F3.0F 0xeb - invalid */
6649/* Opcode VEX.F2.0F 0xeb - invalid */
6650
6651/* Opcode VEX.0F 0xec - invalid */
6652
6653
6654/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
6655FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
6656{
6657 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6658 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
6659 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6660}
6661
6662
6663/* Opcode VEX.F3.0F 0xec - invalid */
6664/* Opcode VEX.F2.0F 0xec - invalid */
6665
6666/* Opcode VEX.0F 0xed - invalid */
6667
6668
6669/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
6670FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
6671{
6672 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6673 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
6674 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6675}
6676
6677
6678/* Opcode VEX.F3.0F 0xed - invalid */
6679/* Opcode VEX.F2.0F 0xed - invalid */
6680
6681/* Opcode VEX.0F 0xee - invalid */
6682
6683
6684/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
6685FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
6686{
6687 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6688 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
6689 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6690}
6691
6692
6693/* Opcode VEX.F3.0F 0xee - invalid */
6694/* Opcode VEX.F2.0F 0xee - invalid */
6695
6696
6697/* Opcode VEX.0F 0xef - invalid */
6698
6699
6700/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
6701FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
6702{
6703 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6704 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6705 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
6706}
6707
6708
6709/* Opcode VEX.F3.0F 0xef - invalid */
6710/* Opcode VEX.F2.0F 0xef - invalid */
6711
6712/* Opcode VEX.0F 0xf0 - invalid */
6713/* Opcode VEX.66.0F 0xf0 - invalid */
6714
6715
6716/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
6717FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
6718{
6719 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6720 Assert(pVCpu->iem.s.uVexLength <= 1);
6721 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6722 if (IEM_IS_MODRM_REG_MODE(bRm))
6723 {
6724 /*
6725 * Register, register - (not implemented, assuming it raises \#UD).
6726 */
6727 IEMOP_RAISE_INVALID_OPCODE_RET();
6728 }
6729 else if (pVCpu->iem.s.uVexLength == 0)
6730 {
6731 /*
6732 * Register, memory128.
6733 */
6734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6735 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6737
6738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6739 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6741 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6742
6743 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6744 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6745
6746 IEM_MC_ADVANCE_RIP_AND_FINISH();
6747 IEM_MC_END();
6748 }
6749 else
6750 {
6751 /*
6752 * Register, memory256.
6753 */
6754 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6755 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
6756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6757
6758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6759 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6760 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6761 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6762
6763 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6764 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
6765
6766 IEM_MC_ADVANCE_RIP_AND_FINISH();
6767 IEM_MC_END();
6768 }
6769}
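
/* Note: vlddqu imposes no alignment requirement (like vmovdqu), which is
   why the fetches above use the _NO_AC variants that skip the alignment
   check. */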
6770
6771
6772/* Opcode VEX.0F 0xf1 - invalid */
6773/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
6774FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
6775{
6776 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6777 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
6778 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6779}
6780
6781/* Opcode VEX.F2.0F 0xf1 - invalid */
6782
6783/* Opcode VEX.0F 0xf2 - invalid */
6784/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
6785FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
6786{
6787 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6788 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
6789 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6790}
6791/* Opcode VEX.F2.0F 0xf2 - invalid */
6792
6793/* Opcode VEX.0F 0xf3 - invalid */
6794/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
6795FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
6796{
6797 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6798 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
6799 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6800}
6801/* Opcode VEX.F2.0F 0xf3 - invalid */
6802
6803/* Opcode VEX.0F 0xf4 - invalid */
6804
6805
6806/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
6807FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
6808{
6809 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6810 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
6811 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6812}
6813
6814
6815/* Opcode VEX.F2.0F 0xf4 - invalid */
6816
6817/* Opcode VEX.0F 0xf5 - invalid */
6818
6819
6820/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
6821FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
6822{
6823 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6824 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
6825 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6826}
6827
6828
6829/* Opcode VEX.F2.0F 0xf5 - invalid */
6830
6831/* Opcode VEX.0F 0xf6 - invalid */
6832
6833
6834/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
6835FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
6836{
6837 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6838 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
6839 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6840}
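
/*
 * Illustrative sketch only: vpsadbw sums the absolute differences of eight
 * unsigned byte pairs, yielding a 16-bit sum that is zero-extended into
 * each 64-bit lane.  The function name is made up for this example.
 */
#if 0
static uint16_t iemExampleSadU8(const uint8_t *pabLeft, const uint8_t *pabRight)
{
    uint16_t uSum = 0;
    for (unsigned i = 0; i < 8; i++)
        uSum += pabLeft[i] >= pabRight[i] ? pabLeft[i] - pabRight[i]
                                          : pabRight[i] - pabLeft[i];
    return uSum;
}
#endif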
6841
6842
6843/* Opcode VEX.F2.0F 0xf6 - invalid */
6844
6845/* Opcode VEX.0F 0xf7 - invalid */
6846
6847
6848/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
6849FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
6850{
6851// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
6852 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6853 if (IEM_IS_MODRM_REG_MODE(bRm))
6854 {
6855 /*
6856 * XMM, XMM, (implicit) [E/R]DI
6857 */
6858 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6859 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6860 IEM_MC_LOCAL( uint64_t, u64EffAddr);
6861 IEM_MC_LOCAL( RTUINT128U, u128Mem);
6862 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
6863 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
6864 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
6865 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6866 IEM_MC_PREPARE_AVX_USAGE();
6867
6868 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
6869 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
6870 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6871 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
6872 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
6873 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
6874
6875 IEM_MC_ADVANCE_RIP_AND_FINISH();
6876 IEM_MC_END();
6877 }
6878 else
6879 {
6880 /* The memory, register encoding is invalid. */
6881 IEMOP_RAISE_INVALID_OPCODE_RET();
6882 }
6883}
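
/*
 * Illustrative sketch only (not the actual iemAImpl_maskmovdqu_u128
 * worker): the byte-granular merge performed above, where bit 7 of each
 * mask byte selects whether the corresponding source byte is stored.  The
 * function name is made up for this example.
 */
#if 0
static void iemExampleMaskMovU128(uint8_t *pabDst, const uint8_t *pabSrc,
                                  const uint8_t *pabMsk)
{
    for (unsigned i = 0; i < 16; i++)
        if (pabMsk[i] & 0x80) /* only the mask MSB is significant */
            pabDst[i] = pabSrc[i];
}
#endif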
6884
6885
6886/* Opcode VEX.F2.0F 0xf7 - invalid */
6887
6888/* Opcode VEX.0F 0xf8 - invalid */
6889
6890
6891/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
6892FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
6893{
6894 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6895 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
6896 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6897}
6898
6899
6900/* Opcode VEX.F2.0F 0xf8 - invalid */
6901
6902/* Opcode VEX.0F 0xf9 - invalid */
6903
6904
6905/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
6906FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
6907{
6908 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6909 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
6910 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6911}
6912
6913
6914/* Opcode VEX.F2.0F 0xf9 - invalid */
6915
6916/* Opcode VEX.0F 0xfa - invalid */
6917
6918
6919/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
6920FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
6921{
6922 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6923 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
6924 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6925}
6926
6927
6928/* Opcode VEX.F2.0F 0xfa - invalid */
6929
6930/* Opcode VEX.0F 0xfb - invalid */
6931
6932
6933/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
6934FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
6935{
6936 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6937 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
6938 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6939}
6940
6941
6942/* Opcode VEX.F2.0F 0xfb - invalid */
6943
6944/* Opcode VEX.0F 0xfc - invalid */
6945
6946
6947/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
6948FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
6949{
6950 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6951 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
6952 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6953}
6954
6955
6956/* Opcode VEX.F2.0F 0xfc - invalid */
6957
6958/* Opcode VEX.0F 0xfd - invalid */
6959
6960
6961/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
6962FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
6963{
6964 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6965 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
6966 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6967}
6968
6969
6970/* Opcode VEX.F2.0F 0xfd - invalid */
6971
6972/* Opcode VEX.0F 0xfe - invalid */
6973
6974
6975/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
6976FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
6977{
6978 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6979 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
6980 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6981}
6982
6983
6984/* Opcode VEX.F2.0F 0xfe - invalid */
6985
6986
6987/** Opcode **** 0x0f 0xff - UD0 */
6988FNIEMOP_DEF(iemOp_vud0)
6989{
6990/** @todo testcase: vud0 */
6991 IEMOP_MNEMONIC(vud0, "vud0");
6992 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
6993 {
6994 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
6995 if (IEM_IS_MODRM_MEM_MODE(bRm))
6996 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
6997 }
6998 IEMOP_HLP_DONE_DECODING();
6999 IEMOP_RAISE_INVALID_OPCODE_RET();
7000}
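
/* Note: the ModR/M handling above mirrors observed Intel behaviour, where
   ud0 consumes a ModR/M byte (plus any addressing bytes) before raising
   \#UD; other vendors are assumed to raise \#UD on the opcode alone. */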
7001
7002
7003
7004/**
7005 * VEX opcode map \#1.
7006 *
7007 * @sa g_apfnTwoByteMap
7008 */
7009const PFNIEMOP g_apfnVexMap1[] =
7010{
7011 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
7012 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7013 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7014 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7015 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7016 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7017 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7018 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7019 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7020 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7021 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7022 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7023 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
7024 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7025 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7026 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7027 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7028
7029 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
7030 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
7031 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7032 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7033 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7034 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7035 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7036 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7037 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7038 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7039 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7040 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7041 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7042 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7043 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7044 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7045
7046 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7047 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7048 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7049 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7050 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7051 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7052 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7053 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7054 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7055 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7056 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7057 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7058 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7059 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7060 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7061 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7062
7063 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7064 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7065 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7066 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7067 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7068 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7069 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7070 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7071 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7072 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7073 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7074 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7075 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7076 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7077 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7078 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7079
7080 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7081 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7082 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7083 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7084 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
7085 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
7086 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
7087 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
7088 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
7089 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
7090 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
7091 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
7092 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
7093 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
7094 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
7095 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
7096
7097 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7098 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7099 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7100 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7101 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7102 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7103 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7104 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7105 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7106 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7107 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7108 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7109 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7110 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7111 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7112 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7113
7114 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7115 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7116 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7117 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7118 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7119 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7120 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7121 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7122 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7123 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7124 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7125 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7126 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7127 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7128 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7129 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7130
7131 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7132 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7133 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7134 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7135 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7136 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7137 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7138 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7139 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
7140 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
7141 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
7142 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
7143 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7144 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7145 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7146 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7147
7148 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
7149 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
7150 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
7151 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
7152 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
7153 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
7154 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
7155 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
7156 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
7157 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
7158 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
7159 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
7160 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
7161 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
7162 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
7163 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
7164
7165 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
7166 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
7167 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
7168 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
7169 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
7170 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
7171 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
7172 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
7173 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
7174 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
7175 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
7176 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
7177 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
7178 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
7179 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
7180 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
7181
7182 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7183 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7184 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7185 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7186 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7187 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7188 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7189 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7190 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7191 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7192 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
7193 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
7194 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
7195 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
7196 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
7197 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
7198
7199 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7200 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7201 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7202 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7203 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7204 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7205 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7206 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7207 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7208 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7209 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
7210 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
7211 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
7212 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
7213 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
7214 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
7215
7216 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7217 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7218 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7219 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7220 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7221 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7222 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7223 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7224 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7225 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7226 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
7227 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
7228 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
7229 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
7230 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
7231 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
7232
7233 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7234 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7235 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7236 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7237 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7238 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7239 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7240 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7241 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7242 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7243 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7244 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7245 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7246 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7247 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7248 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7249
7250 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7251 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7252 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7253 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7254 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7255 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7256 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7257 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7258 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7259 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7260 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7261 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7262 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7263 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7264 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7265 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7266
7267 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7268 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7269 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7270 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7271 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7272 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7273 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7274 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7275 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7276 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7277 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7278 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7279 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7280 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7281 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7282 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
7283};
7284AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
7285/** @} */
7286