source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h @ 105315

Last change: r105315, checked in by vboxsync -- VMM/IEM: Implement vcvttss2si, vcvttsd2si, vcvtss2si and vcvtsd2si instruction emulations, bugref:9898

1/* $Id: IEMAllInstVexMap1.cpp.h 105315 2024-07-12 17:38:53Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
54 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
55 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
56 IEM_MC_PREPARE_AVX_USAGE();
57
58 IEM_MC_LOCAL(X86YMMREG, uSrc1);
59 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
60 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
61 IEM_MC_LOCAL(X86YMMREG, uSrc2);
62 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
63 IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
64 IEM_MC_LOCAL(X86YMMREG, uDst);
65 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
66 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
67 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
68 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
69 IEM_MC_ADVANCE_RIP_AND_FINISH();
70 IEM_MC_END();
71 }
72 else
73 {
74 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
75 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
76 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
77 IEM_MC_PREPARE_AVX_USAGE();
78
79 IEM_MC_LOCAL(X86XMMREG, uDst);
80 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
81 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
82 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
83 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
84 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
85 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
86 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
87 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
88 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
89 IEM_MC_ADVANCE_RIP_AND_FINISH();
90 IEM_MC_END();
91 }
92 }
93 else
94 {
95 /*
96 * Register, memory.
97 */
98 if (pVCpu->iem.s.uVexLength)
99 {
100 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
103 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
105 IEM_MC_PREPARE_AVX_USAGE();
106
107 IEM_MC_LOCAL(X86YMMREG, uSrc2);
108 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
109 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
110 IEM_MC_LOCAL(X86YMMREG, uSrc1);
111 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
112 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
113 IEM_MC_LOCAL(X86YMMREG, uDst);
114 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
115 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
116 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
117 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
118 IEM_MC_ADVANCE_RIP_AND_FINISH();
119 IEM_MC_END();
120 }
121 else
122 {
123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
126 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
128 IEM_MC_PREPARE_AVX_USAGE();
129
130 IEM_MC_LOCAL(X86XMMREG, uDst);
131 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
132 IEM_MC_LOCAL(X86XMMREG, uSrc2);
133 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
134 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
135 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
136 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
137
138 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
139 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
140 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
141 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
142 IEM_MC_ADVANCE_RIP_AND_FINISH();
143 IEM_MC_END();
144 }
145 }
146}
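/*
 * Illustrative sketch, not part of the build: the dataflow the worker above
 * implements.  Operand roles per the decoding macros: Vx is ModRM.reg (the
 * destination), Hx is VEX.vvvv (the first source), Wx is ModRM.rm or memory
 * (the second source); the VEX.L=0 path zeroes bits 255:128 of the
 * destination YMM register.  The names below (YMMREGMODEL, OpAndU128) are
 * made up for the sketch, and a vpand-style AND stands in for pImpl->pfnU128.
 */
#if 0 /* reference model only */
#include <stdint.h>

typedef struct YMMREGMODEL { uint64_t au64[4]; } YMMREGMODEL;

/* Hypothetical stand-in for pImpl->pfnU128: bitwise AND, as in vpand. */
static void OpAndU128(uint64_t *pauDst, const uint64_t *pauSrc1, const uint64_t *pauSrc2)
{
    pauDst[0] = pauSrc1[0] & pauSrc2[0];
    pauDst[1] = pauSrc1[1] & pauSrc2[1];
}

/* 128-bit form: dst = op(vvvv, rm/mem), then clear the upper lane --
   what IEM_MC_CLEAR_YREG_128_UP and the *_ZX_VLMAX stores express. */
static void VexVxHxWx128(YMMREGMODEL *pDst, const YMMREGMODEL *pSrc1, const YMMREGMODEL *pSrc2)
{
    OpAndU128(pDst->au64, pSrc1->au64, pSrc2->au64);
    pDst->au64[2] = 0;
    pDst->au64[3] = 0;
}
#endif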
147
148
149/**
150 * Common worker for scalar AVX/AVX2 instructions (addss, subss, etc.) on the forms:
151 * - vxxxss xmm0, xmm1, xmm2/mem32
152 *
153 * Exceptions type 4. AVX cpuid check for 128-bit operation.
154 * Ignores VEX.L, from SDM:
155 * Software should ensure VADDSS is encoded with VEX.L=0.
156 * Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
157 * across different processor generations.
158 */
159FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
160{
161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
162 if (IEM_IS_MODRM_REG_MODE(bRm))
163 {
164 /*
165 * Register, register.
166 */
167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
168 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
169 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
170 IEM_MC_PREPARE_AVX_USAGE();
171
172 IEM_MC_LOCAL(X86XMMREG, uDst);
173 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
174 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
175 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
176 IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
177 IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
178 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
179 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
180 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
181 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
182 IEM_MC_ADVANCE_RIP_AND_FINISH();
183 IEM_MC_END();
184 }
185 else
186 {
187 /*
188 * Register, memory.
189 */
190 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
194 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
195 IEM_MC_PREPARE_AVX_USAGE();
196
197 IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
198 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
199 IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
200 IEM_MC_LOCAL(X86XMMREG, uDst);
201 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
202 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
203 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
204 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
205 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
206 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
207 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
208 IEM_MC_ADVANCE_RIP_AND_FINISH();
209 IEM_MC_END();
210 }
211}
212
213
214/**
215 * Common worker for scalar AVX/AVX2 instructions (addsd, subsd, etc.) on the forms:
216 * - vxxxsd xmm0, xmm1, xmm2/mem64
217 *
218 * Exceptions type 4. AVX cpuid check for 128-bit operation.
219 * Ignores VEX.L, from SDM:
220 * Software should ensure VADDSD is encoded with VEX.L=0.
221 * Encoding VADDSD with VEX.L=1 may encounter unpredictable behavior
222 * across different processor generations.
223 */
224FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R64, PFNIEMAIMPLFPAVXF3U128R64, pfnU128)
225{
226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
227 if (IEM_IS_MODRM_REG_MODE(bRm))
228 {
229 /*
230 * Register, register.
231 */
232 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
233 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
234 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
235 IEM_MC_PREPARE_AVX_USAGE();
236
237 IEM_MC_LOCAL(X86XMMREG, uDst);
238 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
239 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
240 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
241 IEM_MC_ARG(PCRTFLOAT64U, pr64Src2, 2);
242 IEM_MC_REF_XREG_R64_CONST(pr64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
243 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
244 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
245 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
246 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
247 IEM_MC_ADVANCE_RIP_AND_FINISH();
248 IEM_MC_END();
249 }
250 else
251 {
252 /*
253 * Register, memory.
254 */
255 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
258 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
260 IEM_MC_PREPARE_AVX_USAGE();
261
262 IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
263 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
264 IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
265 IEM_MC_LOCAL(X86XMMREG, uDst);
266 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
267 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
268 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
269 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
270 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
271 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
272 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
273 IEM_MC_ADVANCE_RIP_AND_FINISH();
274 IEM_MC_END();
275 }
276}
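/*
 * Illustrative sketch, not part of the build: the scalar merge behaviour the
 * two workers above delegate to pfnU128.  Assumes standard VADDSS/VADDSD
 * semantics (not spelled out in this file): element 0 of the result is
 * op(src1[0], src2), the remaining bits up to 127 are copied from the first
 * source, and bits 255:128 of the destination are zeroed.  The real pfnU128
 * also consults MXCSR and can raise SIMD FP exceptions, which this omits.
 */
#if 0 /* reference model only */
typedef struct XMM4FMODEL { float ar32[4]; } XMM4FMODEL;

/* Hypothetical vaddss model. */
static XMM4FMODEL VAddSsModel(XMM4FMODEL Src1, float r32Src2)
{
    XMM4FMODEL Dst = Src1;                /* bits 127:32 from the VEX.vvvv register */
    Dst.ar32[0] = Src1.ar32[0] + r32Src2;
    return Dst;                           /* caller zeroes bits 255:128 (ZX_VLMAX)  */
}
#endif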
277
278
279/**
280 * Common worker for AVX2 instructions on the forms:
281 * - vpxxx xmm0, xmm1, xmm2/mem128
282 * - vpxxx ymm0, ymm1, ymm2/mem256
283 *
284 * Takes function table for function w/o implicit state parameter.
285 *
286 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
287 */
288FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
289{
290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
291 if (IEM_IS_MODRM_REG_MODE(bRm))
292 {
293 /*
294 * Register, register.
295 */
296 if (pVCpu->iem.s.uVexLength)
297 {
298 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
299 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
300 IEM_MC_LOCAL(RTUINT256U, uDst);
301 IEM_MC_LOCAL(RTUINT256U, uSrc1);
302 IEM_MC_LOCAL(RTUINT256U, uSrc2);
303 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
304 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
305 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
307 IEM_MC_PREPARE_AVX_USAGE();
308 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
309 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
310 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
311 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
312 IEM_MC_ADVANCE_RIP_AND_FINISH();
313 IEM_MC_END();
314 }
315 else
316 {
317 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
318 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
319 IEM_MC_ARG(PRTUINT128U, puDst, 0);
320 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
321 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
323 IEM_MC_PREPARE_AVX_USAGE();
324 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
325 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
326 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
327 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
328 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
329 IEM_MC_ADVANCE_RIP_AND_FINISH();
330 IEM_MC_END();
331 }
332 }
333 else
334 {
335 /*
336 * Register, memory.
337 */
338 if (pVCpu->iem.s.uVexLength)
339 {
340 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
341 IEM_MC_LOCAL(RTUINT256U, uDst);
342 IEM_MC_LOCAL(RTUINT256U, uSrc1);
343 IEM_MC_LOCAL(RTUINT256U, uSrc2);
344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
345 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
346 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
347 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
348
349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
350 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
352 IEM_MC_PREPARE_AVX_USAGE();
353
354 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
355 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
356 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
357 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
358
359 IEM_MC_ADVANCE_RIP_AND_FINISH();
360 IEM_MC_END();
361 }
362 else
363 {
364 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
365 IEM_MC_LOCAL(RTUINT128U, uSrc2);
366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
367 IEM_MC_ARG(PRTUINT128U, puDst, 0);
368 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
369 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
370
371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
372 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
373 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
374 IEM_MC_PREPARE_AVX_USAGE();
375
376 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
377 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
378 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
379 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
380 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
381
382 IEM_MC_ADVANCE_RIP_AND_FINISH();
383 IEM_MC_END();
384 }
385 }
386}
387
388
389/**
390 * Common worker for AVX2 instructions on the forms:
391 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
392 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
393 *
394 * The 128-bit memory version of this instruction may elect to skip fetching the
395 * lower 64 bits of the operand. We, however, do not.
396 *
397 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
398 */
399FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
400{
401 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
402}
403
404
405/**
406 * Common worker for AVX2 instructions on the forms:
407 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
408 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
409 *
410 * The 128-bit memory version of this instruction may elect to skip fetching the
411 * higher 64 bits of the operand. We, however, do not.
412 *
413 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
414 */
415FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
416{
417 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
418}
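/*
 * Illustrative sketch, not part of the build: why the low/high unpack
 * variants mentioned above could skip half the memory fetch.  Hypothetical
 * dword-granularity models of punpckldq/punpckhdq per 128-bit lane: the low
 * form only reads dwords 0-1 (the low 64 bits) of each source, the high form
 * only dwords 2-3.
 */
#if 0 /* reference model only */
#include <stdint.h>

static void PunpckLDqModel(uint32_t aDst[4], const uint32_t aSrc1[4], const uint32_t aSrc2[4])
{
    uint32_t const u0 = aSrc1[0], u1 = aSrc2[0], u2 = aSrc1[1], u3 = aSrc2[1];
    aDst[0] = u0; aDst[1] = u1; aDst[2] = u2; aDst[3] = u3;  /* interleave the low halves  */
}

static void PunpckHDqModel(uint32_t aDst[4], const uint32_t aSrc1[4], const uint32_t aSrc2[4])
{
    uint32_t const u0 = aSrc1[2], u1 = aSrc2[2], u2 = aSrc1[3], u3 = aSrc2[3];
    aDst[0] = u0; aDst[1] = u1; aDst[2] = u2; aDst[3] = u3;  /* interleave the high halves */
}
#endif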
419
420
421/**
422 * Common worker for AVX2 instructions on the forms:
423 * - vpxxx xmm0, xmm1/mem128
424 * - vpxxx ymm0, ymm1/mem256
425 *
426 * Takes function table for function w/o implicit state parameter.
427 *
428 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
429 */
430FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
431{
432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
433 if (IEM_IS_MODRM_REG_MODE(bRm))
434 {
435 /*
436 * Register, register.
437 */
438 if (pVCpu->iem.s.uVexLength)
439 {
440 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
441 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
442 IEM_MC_LOCAL(RTUINT256U, uDst);
443 IEM_MC_LOCAL(RTUINT256U, uSrc);
444 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
445 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
446 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
447 IEM_MC_PREPARE_AVX_USAGE();
448 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
449 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
450 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
451 IEM_MC_ADVANCE_RIP_AND_FINISH();
452 IEM_MC_END();
453 }
454 else
455 {
456 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
457 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
458 IEM_MC_ARG(PRTUINT128U, puDst, 0);
459 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
460 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
461 IEM_MC_PREPARE_AVX_USAGE();
462 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
463 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
464 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
465 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
466 IEM_MC_ADVANCE_RIP_AND_FINISH();
467 IEM_MC_END();
468 }
469 }
470 else
471 {
472 /*
473 * Register, memory.
474 */
475 if (pVCpu->iem.s.uVexLength)
476 {
477 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
478 IEM_MC_LOCAL(RTUINT256U, uDst);
479 IEM_MC_LOCAL(RTUINT256U, uSrc);
480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
481 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
482 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
483
484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
485 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
486 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
487 IEM_MC_PREPARE_AVX_USAGE();
488
489 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
490 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
491 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
492
493 IEM_MC_ADVANCE_RIP_AND_FINISH();
494 IEM_MC_END();
495 }
496 else
497 {
498 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
499 IEM_MC_LOCAL(RTUINT128U, uSrc);
500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
501 IEM_MC_ARG(PRTUINT128U, puDst, 0);
502 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
503
504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
505 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
506 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
507 IEM_MC_PREPARE_AVX_USAGE();
508
509 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
510 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
511 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
512 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
513
514 IEM_MC_ADVANCE_RIP_AND_FINISH();
515 IEM_MC_END();
516 }
517 }
518}
519
520
521/**
522 * Common worker for AVX/AVX2 instructions on the forms:
523 * - vpxxx xmm0, xmm1/mem128
524 * - vpxxx ymm0, ymm1/mem256
525 *
526 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
527 */
528FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx, PCIEMOPMEDIAF2, pImpl)
529{
530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
531 if (IEM_IS_MODRM_REG_MODE(bRm))
532 {
533 /*
534 * Register, register.
535 */
536 if (pVCpu->iem.s.uVexLength)
537 {
538 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
539 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
540 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
541 IEM_MC_PREPARE_AVX_USAGE();
542
543 IEM_MC_LOCAL(X86YMMREG, uSrc);
544 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
545 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
546 IEM_MC_LOCAL(X86YMMREG, uDst);
547 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
548 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
549 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
550 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
551 IEM_MC_ADVANCE_RIP_AND_FINISH();
552 IEM_MC_END();
553 }
554 else
555 {
556 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
557 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
558 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
559 IEM_MC_PREPARE_AVX_USAGE();
560
561 IEM_MC_LOCAL(X86XMMREG, uDst);
562 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
563 IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
564 IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
565 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
566 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
567 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
568 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
569 IEM_MC_ADVANCE_RIP_AND_FINISH();
570 IEM_MC_END();
571 }
572 }
573 else
574 {
575 /*
576 * Register, memory.
577 */
578 if (pVCpu->iem.s.uVexLength)
579 {
580 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
583 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
584 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
585 IEM_MC_PREPARE_AVX_USAGE();
586
587 IEM_MC_LOCAL(X86YMMREG, uSrc);
588 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
589 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
590 IEM_MC_LOCAL(X86YMMREG, uDst);
591 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
592 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
594 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
595 IEM_MC_ADVANCE_RIP_AND_FINISH();
596 IEM_MC_END();
597 }
598 else
599 {
600 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
603 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
604 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
605 IEM_MC_PREPARE_AVX_USAGE();
606
607 IEM_MC_LOCAL(X86XMMREG, uDst);
608 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
609 IEM_MC_LOCAL(X86XMMREG, uSrc);
610 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
611 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
612 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
613 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
614 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
615 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
616 IEM_MC_ADVANCE_RIP_AND_FINISH();
617 IEM_MC_END();
618 }
619 }
620}
621
622
623
624/* Opcode VEX.0F 0x00 - invalid */
625/* Opcode VEX.0F 0x01 - invalid */
626/* Opcode VEX.0F 0x02 - invalid */
627/* Opcode VEX.0F 0x03 - invalid */
628/* Opcode VEX.0F 0x04 - invalid */
629/* Opcode VEX.0F 0x05 - invalid */
630/* Opcode VEX.0F 0x06 - invalid */
631/* Opcode VEX.0F 0x07 - invalid */
632/* Opcode VEX.0F 0x08 - invalid */
633/* Opcode VEX.0F 0x09 - invalid */
634/* Opcode VEX.0F 0x0a - invalid */
635
636/** Opcode VEX.0F 0x0b. */
637FNIEMOP_DEF(iemOp_vud2)
638{
639 IEMOP_MNEMONIC(vud2, "vud2");
640 IEMOP_RAISE_INVALID_OPCODE_RET();
641}
642
643/* Opcode VEX.0F 0x0c - invalid */
644/* Opcode VEX.0F 0x0d - invalid */
645/* Opcode VEX.0F 0x0e - invalid */
646/* Opcode VEX.0F 0x0f - invalid */
647
648
649/**
650 * @opcode 0x10
651 * @oppfx none
652 * @opcpuid avx
653 * @opgroup og_avx_simdfp_datamove
654 * @opxcpttype 4UA
655 * @optest op1=1 op2=2 -> op1=2
656 * @optest op1=0 op2=-22 -> op1=-22
657 */
658FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
659{
660 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
661 Assert(pVCpu->iem.s.uVexLength <= 1);
662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
663 if (IEM_IS_MODRM_REG_MODE(bRm))
664 {
665 /*
666 * Register, register.
667 */
668 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
669 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
670 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
671 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
672 if (pVCpu->iem.s.uVexLength == 0)
673 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
674 IEM_GET_MODRM_RM(pVCpu, bRm));
675 else
676 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
677 IEM_GET_MODRM_RM(pVCpu, bRm));
678 IEM_MC_ADVANCE_RIP_AND_FINISH();
679 IEM_MC_END();
680 }
681 else if (pVCpu->iem.s.uVexLength == 0)
682 {
683 /*
684 * 128-bit: Register, Memory
685 */
686 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
687 IEM_MC_LOCAL(RTUINT128U, uSrc);
688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
689
690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
691 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
692 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
693 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
694
695 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
696 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
697
698 IEM_MC_ADVANCE_RIP_AND_FINISH();
699 IEM_MC_END();
700 }
701 else
702 {
703 /*
704 * 256-bit: Register, Memory
705 */
706 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
707 IEM_MC_LOCAL(RTUINT256U, uSrc);
708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
709
710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
712 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
713 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
714
715 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
716 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
717
718 IEM_MC_ADVANCE_RIP_AND_FINISH();
719 IEM_MC_END();
720 }
721}
722
723
724/**
725 * @opcode 0x10
726 * @oppfx 0x66
727 * @opcpuid avx
728 * @opgroup og_avx_simdfp_datamove
729 * @opxcpttype 4UA
730 * @optest op1=1 op2=2 -> op1=2
731 * @optest op1=0 op2=-22 -> op1=-22
732 */
733FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
734{
735 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
736 Assert(pVCpu->iem.s.uVexLength <= 1);
737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
738 if (IEM_IS_MODRM_REG_MODE(bRm))
739 {
740 /*
741 * Register, register.
742 */
743 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
744 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
745 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
746 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
747 if (pVCpu->iem.s.uVexLength == 0)
748 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
749 IEM_GET_MODRM_RM(pVCpu, bRm));
750 else
751 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
752 IEM_GET_MODRM_RM(pVCpu, bRm));
753 IEM_MC_ADVANCE_RIP_AND_FINISH();
754 IEM_MC_END();
755 }
756 else if (pVCpu->iem.s.uVexLength == 0)
757 {
758 /*
759 * 128-bit: Register, memory.
760 */
761 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
762 IEM_MC_LOCAL(RTUINT128U, uSrc);
763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
764
765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
766 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
767 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
768 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
769
770 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
771 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
772
773 IEM_MC_ADVANCE_RIP_AND_FINISH();
774 IEM_MC_END();
775 }
776 else
777 {
778 /*
779 * 256-bit: Register, memory.
780 */
781 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
782 IEM_MC_LOCAL(RTUINT256U, uSrc);
783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
784
785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
786 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
787 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
788 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
789
790 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
791 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
792
793 IEM_MC_ADVANCE_RIP_AND_FINISH();
794 IEM_MC_END();
795 }
796}
797
798
799FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
800{
801 Assert(pVCpu->iem.s.uVexLength <= 1);
802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
803 if (IEM_IS_MODRM_REG_MODE(bRm))
804 {
805 /**
806 * @opcode 0x10
807 * @oppfx 0xf3
808 * @opcodesub 11 mr/reg
809 * @opcpuid avx
810 * @opgroup og_avx_simdfp_datamerge
811 * @opxcpttype 5
812 * @optest op1=1 op2=0 op3=2 -> op1=2
813 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
814 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
815 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
816 * @note HssHi refers to bits 127:32.
817 */
818 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
819 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
820 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
822 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
823 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
824 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
825 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
826 IEM_MC_ADVANCE_RIP_AND_FINISH();
827 IEM_MC_END();
828 }
829 else
830 {
831 /**
832 * @opdone
833 * @opcode 0x10
834 * @oppfx 0xf3
835 * @opcodesub !11 mr/reg
836 * @opcpuid avx
837 * @opgroup og_avx_simdfp_datamove
838 * @opxcpttype 5
839 * @opfunction iemOp_vmovss_Vss_Hss_Wss
840 * @optest op1=1 op2=2 -> op1=2
841 * @optest op1=0 op2=-22 -> op1=-22
842 */
843 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
844 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
845 IEM_MC_LOCAL(uint32_t, uSrc);
846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
847
848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
849 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
850 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
851 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
852
853 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
854 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
855
856 IEM_MC_ADVANCE_RIP_AND_FINISH();
857 IEM_MC_END();
858 }
859}
860
861
862FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
863{
864 Assert(pVCpu->iem.s.uVexLength <= 1);
865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
866 if (IEM_IS_MODRM_REG_MODE(bRm))
867 {
868 /**
869 * @opcode 0x10
870 * @oppfx 0xf2
871 * @opcodesub 11 mr/reg
872 * @opcpuid avx
873 * @opgroup og_avx_simdfp_datamerge
874 * @opxcpttype 5
875 * @optest op1=1 op2=0 op3=2 -> op1=2
876 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
877 * @optest op1=3 op2=-1 op3=0x77 ->
878 * op1=0xffffffffffffffff0000000000000077
879 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
880 */
881 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
882 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
883 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
884
885 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
886 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
887 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
888 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
889 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
890 IEM_MC_ADVANCE_RIP_AND_FINISH();
891 IEM_MC_END();
892 }
893 else
894 {
895 /**
896 * @opdone
897 * @opcode 0x10
898 * @oppfx 0xf2
899 * @opcodesub !11 mr/reg
900 * @opcpuid avx
901 * @opgroup og_avx_simdfp_datamove
902 * @opxcpttype 5
903 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
904 * @optest op1=1 op2=2 -> op1=2
905 * @optest op1=0 op2=-22 -> op1=-22
906 */
907 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
908 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
909 IEM_MC_LOCAL(uint64_t, uSrc);
910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
911
912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
913 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
914 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
915 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
916
917 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
918 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
919
920 IEM_MC_ADVANCE_RIP_AND_FINISH();
921 IEM_MC_END();
922 }
923}
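/*
 * Illustrative sketch, not part of the build: the two shapes of VMOVSS/VMOVSD
 * above.  The register form is a three-operand merge (low element from
 * ModRM.rm, the rest of the XMM from VEX.vvvv); the memory form is a plain
 * zero-extending load.  In both cases bits 255:128 end up zero.  Types and
 * names are made up for the sketch.
 */
#if 0 /* reference model only */
#include <stdint.h>

typedef struct XMMU32MODEL { uint32_t au32[4]; } XMMU32MODEL;

/* Register form: IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX. */
static XMMU32MODEL VMovSsRegModel(XMMU32MODEL Hss, XMMU32MODEL Uss)
{
    XMMU32MODEL Dst = Hss;          /* bits 127:32 from VEX.vvvv */
    Dst.au32[0] = Uss.au32[0];      /* bits  31:0  from ModRM.rm */
    return Dst;
}

/* Memory form: IEM_MC_STORE_YREG_U32_ZX_VLMAX. */
static XMMU32MODEL VMovSsMemModel(uint32_t uSrc)
{
    XMMU32MODEL Dst = { { uSrc, 0, 0, 0 } };
    return Dst;
}
#endif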
924
925
926/**
927 * @opcode 0x11
928 * @oppfx none
929 * @opcpuid avx
930 * @opgroup og_avx_simdfp_datamove
931 * @opxcpttype 4UA
932 * @optest op1=1 op2=2 -> op1=2
933 * @optest op1=0 op2=-22 -> op1=-22
934 */
935FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
936{
937 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
938 Assert(pVCpu->iem.s.uVexLength <= 1);
939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
940 if (IEM_IS_MODRM_REG_MODE(bRm))
941 {
942 /*
943 * Register, register.
944 */
945 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
948 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
949 if (pVCpu->iem.s.uVexLength == 0)
950 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
951 IEM_GET_MODRM_REG(pVCpu, bRm));
952 else
953 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
954 IEM_GET_MODRM_REG(pVCpu, bRm));
955 IEM_MC_ADVANCE_RIP_AND_FINISH();
956 IEM_MC_END();
957 }
958 else if (pVCpu->iem.s.uVexLength == 0)
959 {
960 /*
961 * 128-bit: Memory, register.
962 */
963 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
964 IEM_MC_LOCAL(RTUINT128U, uSrc);
965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
966
967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
968 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
969 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
970 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
971
972 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
973 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
974
975 IEM_MC_ADVANCE_RIP_AND_FINISH();
976 IEM_MC_END();
977 }
978 else
979 {
980 /*
981 * 256-bit: Memory, register.
982 */
983 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
984 IEM_MC_LOCAL(RTUINT256U, uSrc);
985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
986
987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
988 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
989 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
990 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
991
992 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
993 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
994
995 IEM_MC_ADVANCE_RIP_AND_FINISH();
996 IEM_MC_END();
997 }
998}
999
1000
1001/**
1002 * @opcode 0x11
1003 * @oppfx 0x66
1004 * @opcpuid avx
1005 * @opgroup og_avx_simdfp_datamove
1006 * @opxcpttype 4UA
1007 * @optest op1=1 op2=2 -> op1=2
1008 * @optest op1=0 op2=-22 -> op1=-22
1009 */
1010FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
1011{
1012 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1013 Assert(pVCpu->iem.s.uVexLength <= 1);
1014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1015 if (IEM_IS_MODRM_REG_MODE(bRm))
1016 {
1017 /*
1018 * Register, register.
1019 */
1020 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1021 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1022 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1023 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1024 if (pVCpu->iem.s.uVexLength == 0)
1025 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1026 IEM_GET_MODRM_REG(pVCpu, bRm));
1027 else
1028 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1029 IEM_GET_MODRM_REG(pVCpu, bRm));
1030 IEM_MC_ADVANCE_RIP_AND_FINISH();
1031 IEM_MC_END();
1032 }
1033 else if (pVCpu->iem.s.uVexLength == 0)
1034 {
1035 /*
1036 * 128-bit: Memory, register.
1037 */
1038 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1039 IEM_MC_LOCAL(RTUINT128U, uSrc);
1040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1041
1042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1043 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1044 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1045 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1046
1047 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
1048 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1049
1050 IEM_MC_ADVANCE_RIP_AND_FINISH();
1051 IEM_MC_END();
1052 }
1053 else
1054 {
1055 /*
1056 * 256-bit: Memory, register.
1057 */
1058 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1059 IEM_MC_LOCAL(RTUINT256U, uSrc);
1060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1061
1062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1063 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1066
1067 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1068 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1069
1070 IEM_MC_ADVANCE_RIP_AND_FINISH();
1071 IEM_MC_END();
1072 }
1073}
1074
1075
1076FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
1077{
1078 Assert(pVCpu->iem.s.uVexLength <= 1);
1079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1080 if (IEM_IS_MODRM_REG_MODE(bRm))
1081 {
1082 /**
1083 * @opcode 0x11
1084 * @oppfx 0xf3
1085 * @opcodesub 11 mr/reg
1086 * @opcpuid avx
1087 * @opgroup og_avx_simdfp_datamerge
1088 * @opxcpttype 5
1089 * @optest op1=1 op2=0 op3=2 -> op1=2
1090 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
1091 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
1092 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
1093 */
1094 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1095 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1096 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1097
1098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1100 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
1101 IEM_GET_MODRM_REG(pVCpu, bRm),
1102 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
1103 IEM_MC_ADVANCE_RIP_AND_FINISH();
1104 IEM_MC_END();
1105 }
1106 else
1107 {
1108 /**
1109 * @opdone
1110 * @opcode 0x11
1111 * @oppfx 0xf3
1112 * @opcodesub !11 mr/reg
1113 * @opcpuid avx
1114 * @opgroup og_avx_simdfp_datamove
1115 * @opxcpttype 5
1116 * @opfunction iemOp_vmovss_Wss_Hss_Vss
1117 * @optest op1=1 op2=2 -> op1=2
1118 * @optest op1=0 op2=-22 -> op1=-22
1119 */
1120 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1121 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1122 IEM_MC_LOCAL(uint32_t, uSrc);
1123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1124
1125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1126 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1128 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1129
1130 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1131 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1132
1133 IEM_MC_ADVANCE_RIP_AND_FINISH();
1134 IEM_MC_END();
1135 }
1136}
1137
1138
1139FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
1140{
1141 Assert(pVCpu->iem.s.uVexLength <= 1);
1142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1143 if (IEM_IS_MODRM_REG_MODE(bRm))
1144 {
1145 /**
1146 * @opcode 0x11
1147 * @oppfx 0xf2
1148 * @opcodesub 11 mr/reg
1149 * @opcpuid avx
1150 * @opgroup og_avx_simdfp_datamerge
1151 * @opxcpttype 5
1152 * @optest op1=1 op2=0 op3=2 -> op1=2
1153 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
1154 * @optest op1=3 op2=-1 op3=0x77 ->
1155 * op1=0xffffffffffffffff0000000000000077
1156 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
1157 */
1158 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1159 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1160 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1161
1162 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1163 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1164 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1165 IEM_GET_MODRM_REG(pVCpu, bRm),
1166 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
1167 IEM_MC_ADVANCE_RIP_AND_FINISH();
1168 IEM_MC_END();
1169 }
1170 else
1171 {
1172 /**
1173 * @opdone
1174 * @opcode 0x11
1175 * @oppfx 0xf2
1176 * @opcodesub !11 mr/reg
1177 * @opcpuid avx
1178 * @opgroup og_avx_simdfp_datamove
1179 * @opxcpttype 5
1180 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
1181 * @optest op1=1 op2=2 -> op1=2
1182 * @optest op1=0 op2=-22 -> op1=-22
1183 */
1184 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1185 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1186 IEM_MC_LOCAL(uint64_t, uSrc);
1187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1188
1189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1193
1194 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1195 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1196
1197 IEM_MC_ADVANCE_RIP_AND_FINISH();
1198 IEM_MC_END();
1199 }
1200}
1201
1202
1203FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
1204{
1205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1206 if (IEM_IS_MODRM_REG_MODE(bRm))
1207 {
1208 /**
1209 * @opcode 0x12
1210 * @opcodesub 11 mr/reg
1211 * @oppfx none
1212 * @opcpuid avx
1213 * @opgroup og_avx_simdfp_datamerge
1214 * @opxcpttype 7LZ
1215 * @optest op2=0x2200220122022203
1216 * op3=0x3304330533063307
1217 * -> op1=0x22002201220222033304330533063307
1218 * @optest op2=-1 op3=-42 -> op1=-42
1219 * @note op3 and op2 are only the 8-byte high XMM register halves.
1220 */
1221 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1222 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1223 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1224
1225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1226 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1227 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1228 IEM_GET_MODRM_RM(pVCpu, bRm),
1229 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1230
1231 IEM_MC_ADVANCE_RIP_AND_FINISH();
1232 IEM_MC_END();
1233 }
1234 else
1235 {
1236 /**
1237 * @opdone
1238 * @opcode 0x12
1239 * @opcodesub !11 mr/reg
1240 * @oppfx none
1241 * @opcpuid avx
1242 * @opgroup og_avx_simdfp_datamove
1243 * @opxcpttype 5LZ
1244 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1245 * @optest op1=1 op2=0 op3=0 -> op1=0
1246 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1247 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1248 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1249 */
1250 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1251
1252 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1253 IEM_MC_LOCAL(uint64_t, uSrc);
1254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1255
1256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1257 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1258 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1259 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1260
1261 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1262 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1263 uSrc,
1264 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1265
1266 IEM_MC_ADVANCE_RIP_AND_FINISH();
1267 IEM_MC_END();
1268 }
1269}
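/*
 * Illustrative sketch, not part of the build: the qword plumbing of the two
 * forms above.  VMOVHLPS copies the high qword of ModRM.rm into the low
 * qword of the result; VMOVLPS loads that low qword from memory instead.
 * Both take the high qword from VEX.vvvv and zero bits 255:128.  The type
 * below is made up for the sketch.
 */
#if 0 /* reference model only */
#include <stdint.h>

typedef struct XMMU64MODEL { uint64_t au64[2]; } XMMU64MODEL;

static XMMU64MODEL VMovHlPsModel(XMMU64MODEL Hq, XMMU64MODEL Uq)
{
    XMMU64MODEL Dst;
    Dst.au64[0] = Uq.au64[1];       /* low  <- high qword of ModRM.rm */
    Dst.au64[1] = Hq.au64[1];       /* high <- high qword of VEX.vvvv */
    return Dst;
}
#endif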
1270
1271
1272/**
1273 * @opcode 0x12
1274 * @opcodesub !11 mr/reg
1275 * @oppfx 0x66
1276 * @opcpuid avx
1277 * @opgroup og_avx_pcksclr_datamerge
1278 * @opxcpttype 5LZ
1279 * @optest op2=0 op3=2 -> op1=2
1280 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1281 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1282 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1283 */
1284FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1285{
1286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1287 if (IEM_IS_MODRM_MEM_MODE(bRm))
1288 {
1289 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1290
1291 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1292 IEM_MC_LOCAL(uint64_t, uSrc);
1293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1294
1295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1296 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1297 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1298 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1299
1300 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1301 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1302 uSrc,
1303 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1304
1305 IEM_MC_ADVANCE_RIP_AND_FINISH();
1306 IEM_MC_END();
1307 }
1308
1309 /**
1310 * @opdone
1311 * @opmnemonic udvex660f12m3
1312 * @opcode 0x12
1313 * @opcodesub 11 mr/reg
1314 * @oppfx 0x66
1315 * @opunused immediate
1316 * @opcpuid avx
1317 * @optest ->
1318 */
1319 else
1320 IEMOP_RAISE_INVALID_OPCODE_RET();
1321}
1322
1323
1324/**
1325 * @opcode 0x12
1326 * @oppfx 0xf3
1327 * @opcpuid avx
1328 * @opgroup og_avx_pcksclr_datamove
1329 * @opxcpttype 4
1330 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1331 * -> op1=0x00000002000000020000000100000001
1332 * @optest vex.l==1 /
1333 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1334 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1335 */
1336FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1337{
1338 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1339 Assert(pVCpu->iem.s.uVexLength <= 1);
1340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1341 if (IEM_IS_MODRM_REG_MODE(bRm))
1342 {
1343 /*
1344 * Register, register.
1345 */
1346 if (pVCpu->iem.s.uVexLength == 0)
1347 {
1348 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1349 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1350 IEM_MC_LOCAL(RTUINT128U, uSrc);
1351
1352 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1353 IEM_MC_PREPARE_AVX_USAGE();
1354
1355 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1356 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1357 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1358 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1359 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1360 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1361
1362 IEM_MC_ADVANCE_RIP_AND_FINISH();
1363 IEM_MC_END();
1364 }
1365 else
1366 {
1367 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1368 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1369 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1370 IEM_MC_PREPARE_AVX_USAGE();
1371
1372 IEM_MC_LOCAL(RTUINT256U, uSrc);
1373 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1374 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1375 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1376 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1377 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1378 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1379 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1380 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1381 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1382 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1383
1384 IEM_MC_ADVANCE_RIP_AND_FINISH();
1385 IEM_MC_END();
1386 }
1387 }
1388 else
1389 {
1390 /*
1391 * Register, memory.
1392 */
1393 if (pVCpu->iem.s.uVexLength == 0)
1394 {
1395 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1396 IEM_MC_LOCAL(RTUINT128U, uSrc);
1397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1398
1399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1400 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1401 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1402 IEM_MC_PREPARE_AVX_USAGE();
1403
1404 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1405 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1406 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1407 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1408 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1409 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1410
1411 IEM_MC_ADVANCE_RIP_AND_FINISH();
1412 IEM_MC_END();
1413 }
1414 else
1415 {
1416 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1419 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1420 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1421 IEM_MC_PREPARE_AVX_USAGE();
1422
1423 IEM_MC_LOCAL(RTUINT256U, uSrc);
1424 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1425
1426 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1427 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1428 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1429 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1430 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1431 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1432 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1433 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1434 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1435
1436 IEM_MC_ADVANCE_RIP_AND_FINISH();
1437 IEM_MC_END();
1438 }
1439 }
1440}
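/*
 * Illustrative sketch, not part of the build: the VMOVSLDUP lane pattern the
 * stores above spell out -- each even-numbered source dword is duplicated
 * into the odd lane next to it, per 128-bit lane (array indices in
 * little-endian dword order).
 */
#if 0 /* reference model only */
#include <stdint.h>

static void VMovSlDupModel128(uint32_t aDst[4], const uint32_t aSrc[4])
{
    aDst[0] = aSrc[0];
    aDst[1] = aSrc[0];
    aDst[2] = aSrc[2];
    aDst[3] = aSrc[2];
}
#endif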
1441
1442
1443/**
1444 * @opcode 0x12
1445 * @oppfx 0xf2
1446 * @opcpuid avx
1447 * @opgroup og_avx_pcksclr_datamove
1448 * @opxcpttype 5
1449 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1450 * -> op1=0x22222222111111112222222211111111
1451 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1452 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1453 */
1454FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1455{
1456 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1458 if (IEM_IS_MODRM_REG_MODE(bRm))
1459 {
1460 /*
1461 * Register, register.
1462 */
1463 if (pVCpu->iem.s.uVexLength == 0)
1464 {
1465 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1466 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1467 IEM_MC_LOCAL(uint64_t, uSrc);
1468
1469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1470 IEM_MC_PREPARE_AVX_USAGE();
1471
1472 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1473 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1474 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1475 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1476
1477 IEM_MC_ADVANCE_RIP_AND_FINISH();
1478 IEM_MC_END();
1479 }
1480 else
1481 {
1482 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1483 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1484 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1485 IEM_MC_PREPARE_AVX_USAGE();
1486
1487 IEM_MC_LOCAL(uint64_t, uSrc1);
1488 IEM_MC_LOCAL(uint64_t, uSrc2);
1489 IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1490 IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);
1491
1492 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
1493 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
1494 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
1495 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
1496 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1497
1498 IEM_MC_ADVANCE_RIP_AND_FINISH();
1499 IEM_MC_END();
1500 }
1501 }
1502 else
1503 {
1504 /*
1505 * Register, memory.
1506 */
1507 if (pVCpu->iem.s.uVexLength == 0)
1508 {
1509 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1511 IEM_MC_LOCAL(uint64_t, uSrc);
1512
1513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1514 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1515 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1516 IEM_MC_PREPARE_AVX_USAGE();
1517
1518 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1519 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1520 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1521 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1522
1523 IEM_MC_ADVANCE_RIP_AND_FINISH();
1524 IEM_MC_END();
1525 }
1526 else
1527 {
1528 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1530
1531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1532 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1533 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1534 IEM_MC_PREPARE_AVX_USAGE();
1535
1536 IEM_MC_LOCAL(RTUINT256U, uSrc);
1537 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1538
1539 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1540 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1541 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1542 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1543 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1544
1545 IEM_MC_ADVANCE_RIP_AND_FINISH();
1546 IEM_MC_END();
1547 }
1548 }
1549}
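/*
 * Illustrative sketch, not part of the build: VMOVDDUP duplicates the low
 * qword of each 128-bit lane, matching the pairs of IEM_MC_STORE_*_U64 calls
 * above.
 */
#if 0 /* reference model only */
#include <stdint.h>

static void VMovDDupModel128(uint64_t aDst[2], const uint64_t aSrc[2])
{
    aDst[0] = aSrc[0];
    aDst[1] = aSrc[0];
}
#endif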
1550
1551
1552/**
1553 * @opcode 0x13
1554 * @opcodesub !11 mr/reg
1555 * @oppfx none
1556 * @opcpuid avx
1557 * @opgroup og_avx_simdfp_datamove
1558 * @opxcpttype 5
1559 * @optest op1=1 op2=2 -> op1=2
1560 * @optest op1=0 op2=-42 -> op1=-42
1561 */
1562FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1563{
1564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1565 if (IEM_IS_MODRM_MEM_MODE(bRm))
1566 {
1567 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1568
1569 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1570 IEM_MC_LOCAL(uint64_t, uSrc);
1571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1572
1573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1574 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1575 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1576 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1577
1578 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1579 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1580
1581 IEM_MC_ADVANCE_RIP_AND_FINISH();
1582 IEM_MC_END();
1583 }
1584
1585 /**
1586 * @opdone
1587 * @opmnemonic udvex0f13m3
1588 * @opcode 0x13
1589 * @opcodesub 11 mr/reg
1590 * @oppfx none
1591 * @opunused immediate
1592 * @opcpuid avx
1593 * @optest ->
1594 */
1595 else
1596 IEMOP_RAISE_INVALID_OPCODE_RET();
1597}
1598
1599
1600/**
1601 * @opcode 0x13
1602 * @opcodesub !11 mr/reg
1603 * @oppfx 0x66
1604 * @opcpuid avx
1605 * @opgroup og_avx_pcksclr_datamove
1606 * @opxcpttype 5
1607 * @optest op1=1 op2=2 -> op1=2
1608 * @optest op1=0 op2=-42 -> op1=-42
1609 */
1610FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1611{
1612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1613 if (IEM_IS_MODRM_MEM_MODE(bRm))
1614 {
1615 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1617 IEM_MC_LOCAL(uint64_t, uSrc);
1618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1619
1620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1621 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1624
1625 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1626 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1627
1628 IEM_MC_ADVANCE_RIP_AND_FINISH();
1629 IEM_MC_END();
1630 }
1631
1632 /**
1633 * @opdone
1634 * @opmnemonic udvex660f13m3
1635 * @opcode 0x13
1636 * @opcodesub 11 mr/reg
1637 * @oppfx 0x66
1638 * @opunused immediate
1639 * @opcpuid avx
1640 * @optest ->
1641 */
1642 else
1643 IEMOP_RAISE_INVALID_OPCODE_RET();
1644}
1645
1646/* Opcode VEX.F3.0F 0x13 - invalid */
1647/* Opcode VEX.F2.0F 0x13 - invalid */
1648
1649/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
1650FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1651{
1652 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1653 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1654 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1655}
1656
1657
1658/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1659FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1660{
1661 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1662 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1663 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1664}
1665
1666
1667/* Opcode VEX.F3.0F 0x14 - invalid */
1668/* Opcode VEX.F2.0F 0x14 - invalid */
1669
1670
1671/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1672FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1673{
1674 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1675 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1676 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1677}
1678
1679
1680/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1681FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1682{
1683 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1684 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1685 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1686}
1687
1688
1689/* Opcode VEX.F3.0F 0x15 - invalid */
1690/* Opcode VEX.F2.0F 0x15 - invalid */
1691
1692
1693FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1694{
1695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1696 if (IEM_IS_MODRM_REG_MODE(bRm))
1697 {
1698 /**
1699 * @opcode 0x16
1700 * @opcodesub 11 mr/reg
1701 * @oppfx none
1702 * @opcpuid avx
1703 * @opgroup og_avx_simdfp_datamerge
1704 * @opxcpttype 7LZ
1705 */
1706 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1707
1708 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1709 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1710
1711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1712 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
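        /* vmovlhps: dst[63:0] = Hq[63:0], dst[127:64] = Uq[63:0]; bits 128 and up are zeroed (ZX to VLMAX). */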
1713 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1714 IEM_GET_MODRM_RM(pVCpu, bRm),
1715 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1716
1717 IEM_MC_ADVANCE_RIP_AND_FINISH();
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 /**
1723 * @opdone
1724 * @opcode 0x16
1725 * @opcodesub !11 mr/reg
1726 * @oppfx none
1727 * @opcpuid avx
1728 * @opgroup og_avx_simdfp_datamove
1729 * @opxcpttype 5LZ
1730 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1731 */
1732 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1733
1734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1735 IEM_MC_LOCAL(uint64_t, uSrc);
1736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1737
1738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1739 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1741 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1742
1743 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1744 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1745 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1746 uSrc);
1747
1748 IEM_MC_ADVANCE_RIP_AND_FINISH();
1749 IEM_MC_END();
1750 }
1751}
1752
1753
1754/**
1755 * @opcode 0x16
1756 * @opcodesub !11 mr/reg
1757 * @oppfx 0x66
1758 * @opcpuid avx
1759 * @opgroup og_avx_pcksclr_datamerge
1760 * @opxcpttype 5LZ
1761 */
1762FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1763{
1764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1765 if (IEM_IS_MODRM_MEM_MODE(bRm))
1766 {
1767 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1768
1769 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1770 IEM_MC_LOCAL(uint64_t, uSrc);
1771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1772
1773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1774 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1775 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1776 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1777
1778 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1779 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1780 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1781 uSrc);
1782
1783 IEM_MC_ADVANCE_RIP_AND_FINISH();
1784 IEM_MC_END();
1785 }
1786
1787 /**
1788 * @opdone
1789 * @opmnemonic udvex660f16m3
1790 * @opcode 0x16
1791 * @opcodesub 11 mr/reg
1792 * @oppfx 0x66
1793 * @opunused immediate
1794 * @opcpuid avx
1795 * @optest ->
1796 */
1797 else
1798 IEMOP_RAISE_INVALID_OPCODE_RET();
1799}
1800
1801
1802/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1803/**
1804 * @opcode 0x16
1805 * @oppfx 0xf3
1806 * @opcpuid avx
1807 * @opgroup og_avx_pcksclr_datamove
1808 * @opxcpttype 4
1809 */
1810FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1811{
1812 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1813 Assert(pVCpu->iem.s.uVexLength <= 1);
1814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1815 if (IEM_IS_MODRM_REG_MODE(bRm))
1816 {
1817 /*
1818 * Register, register.
1819 */
1820 if (pVCpu->iem.s.uVexLength == 0)
1821 {
1822 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1823 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1824 IEM_MC_LOCAL(RTUINT128U, uSrc);
1825
1826 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1827 IEM_MC_PREPARE_AVX_USAGE();
1828
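        /* movshdup: duplicate the odd (high) dword of each dword pair downwards, i.e. dst[2n] = dst[2n+1] = src[2n+1]. */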
1829 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1830 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1831 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1832 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1833 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1834 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1835
1836 IEM_MC_ADVANCE_RIP_AND_FINISH();
1837 IEM_MC_END();
1838 }
1839 else
1840 {
1841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1842 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1844 IEM_MC_PREPARE_AVX_USAGE();
1845
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1848 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1849 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1850 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1851 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1852 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1853 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1854 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1855 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1856 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1857
1858 IEM_MC_ADVANCE_RIP_AND_FINISH();
1859 IEM_MC_END();
1860 }
1861 }
1862 else
1863 {
1864 /*
1865 * Register, memory.
1866 */
1867 if (pVCpu->iem.s.uVexLength == 0)
1868 {
1869 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1870 IEM_MC_LOCAL(RTUINT128U, uSrc);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872
1873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1874 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1875 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1876 IEM_MC_PREPARE_AVX_USAGE();
1877
1878 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1879 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1880 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1881 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1882 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1883 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1884
1885 IEM_MC_ADVANCE_RIP_AND_FINISH();
1886 IEM_MC_END();
1887 }
1888 else
1889 {
1890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1893 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1895 IEM_MC_PREPARE_AVX_USAGE();
1896
1897 IEM_MC_LOCAL(RTUINT256U, uSrc);
1898 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1899
1900 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1901 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1902 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1903 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1904 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1905 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1906 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1907 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1908 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1909
1910 IEM_MC_ADVANCE_RIP_AND_FINISH();
1911 IEM_MC_END();
1912 }
1913 }
1914}
1915
1916
1917/* Opcode VEX.F2.0F 0x16 - invalid */
1918
1919
1920/**
1921 * @opcode 0x17
1922 * @opcodesub !11 mr/reg
1923 * @oppfx none
1924 * @opcpuid avx
1925 * @opgroup og_avx_simdfp_datamove
1926 * @opxcpttype 5
1927 */
1928FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1929{
1930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1931 if (IEM_IS_MODRM_MEM_MODE(bRm))
1932 {
1933 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1934
1935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1936 IEM_MC_LOCAL(uint64_t, uSrc);
1937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1938
1939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1940 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1941 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1942 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1943
1944 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1945 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1946
1947 IEM_MC_ADVANCE_RIP_AND_FINISH();
1948 IEM_MC_END();
1949 }
1950
1951 /**
1952 * @opdone
1953 * @opmnemonic udvex0f17m3
1954 * @opcode 0x17
1955 * @opcodesub 11 mr/reg
1956 * @oppfx none
1957 * @opunused immediate
1958 * @opcpuid avx
1959 * @optest ->
1960 */
1961 else
1962 IEMOP_RAISE_INVALID_OPCODE_RET();
1963}
1964
1965
1966/**
1967 * @opcode 0x17
1968 * @opcodesub !11 mr/reg
1969 * @oppfx 0x66
1970 * @opcpuid avx
1971 * @opgroup og_avx_pcksclr_datamove
1972 * @opxcpttype 5
1973 */
1974FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1975{
1976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1977 if (IEM_IS_MODRM_MEM_MODE(bRm))
1978 {
1979 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1980
1981 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1982 IEM_MC_LOCAL(uint64_t, uSrc);
1983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1984
1985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1986 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1988 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1989
1990 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1991 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1992
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 }
1996
1997 /**
1998 * @opdone
1999 * @opmnemonic udvex660f17m3
2000 * @opcode 0x17
2001 * @opcodesub 11 mr/reg
2002 * @oppfx 0x66
2003 * @opunused immediate
2004 * @opcpuid avx
2005 * @optest ->
2006 */
2007 else
2008 IEMOP_RAISE_INVALID_OPCODE_RET();
2009}
2010
2011
2012/* Opcode VEX.F3.0F 0x17 - invalid */
2013/* Opcode VEX.F2.0F 0x17 - invalid */
2014
2015
2016/* Opcode VEX.0F 0x18 - invalid */
2017/* Opcode VEX.0F 0x19 - invalid */
2018/* Opcode VEX.0F 0x1a - invalid */
2019/* Opcode VEX.0F 0x1b - invalid */
2020/* Opcode VEX.0F 0x1c - invalid */
2021/* Opcode VEX.0F 0x1d - invalid */
2022/* Opcode VEX.0F 0x1e - invalid */
2023/* Opcode VEX.0F 0x1f - invalid */
2024
2025/* Opcode VEX.0F 0x20 - invalid */
2026/* Opcode VEX.0F 0x21 - invalid */
2027/* Opcode VEX.0F 0x22 - invalid */
2028/* Opcode VEX.0F 0x23 - invalid */
2029/* Opcode VEX.0F 0x24 - invalid */
2030/* Opcode VEX.0F 0x25 - invalid */
2031/* Opcode VEX.0F 0x26 - invalid */
2032/* Opcode VEX.0F 0x27 - invalid */
2033
2034/**
2035 * @opcode 0x28
2036 * @oppfx none
2037 * @opcpuid avx
2038 * @opgroup og_avx_pcksclr_datamove
2039 * @opxcpttype 1
2040 * @optest op1=1 op2=2 -> op1=2
2041 * @optest op1=0 op2=-42 -> op1=-42
2042 * @note Almost identical to vmovapd.
2043 */
2044FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2045{
2046 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2048 Assert(pVCpu->iem.s.uVexLength <= 1);
2049 if (IEM_IS_MODRM_REG_MODE(bRm))
2050 {
2051 /*
2052 * Register, register.
2053 */
2054 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2055 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2056
2057 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2058 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2059 if (pVCpu->iem.s.uVexLength == 0)
2060 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2061 IEM_GET_MODRM_RM(pVCpu, bRm));
2062 else
2063 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2064 IEM_GET_MODRM_RM(pVCpu, bRm));
2065 IEM_MC_ADVANCE_RIP_AND_FINISH();
2066 IEM_MC_END();
2067 }
2068 else
2069 {
2070 /*
2071 * Register, memory.
2072 */
2073 if (pVCpu->iem.s.uVexLength == 0)
2074 {
2075 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_LOCAL(RTUINT128U, uSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2083
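            /* Aligned fetch: a 16-byte operand that is not 16-byte aligned raises \#GP(0). */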
2084 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090 else
2091 {
2092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2094 IEM_MC_LOCAL(RTUINT256U, uSrc);
2095
2096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2097 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2100
2101 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2102 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2103
2104 IEM_MC_ADVANCE_RIP_AND_FINISH();
2105 IEM_MC_END();
2106 }
2107 }
2108}
2109
2110
2111/**
2112 * @opcode 0x28
2113 * @oppfx 0x66
2114 * @opcpuid avx
2115 * @opgroup og_avx_pcksclr_datamove
2116 * @opxcpttype 1
2117 * @optest op1=1 op2=2 -> op1=2
2118 * @optest op1=0 op2=-42 -> op1=-42
2119 * @note Almost identical to vmovaps
2120 */
2121FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2122{
2123 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2125 Assert(pVCpu->iem.s.uVexLength <= 1);
2126 if (IEM_IS_MODRM_REG_MODE(bRm))
2127 {
2128 /*
2129 * Register, register.
2130 */
2131 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2132 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2133
2134 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2136 if (pVCpu->iem.s.uVexLength == 0)
2137 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2138 IEM_GET_MODRM_RM(pVCpu, bRm));
2139 else
2140 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2141 IEM_GET_MODRM_RM(pVCpu, bRm));
2142 IEM_MC_ADVANCE_RIP_AND_FINISH();
2143 IEM_MC_END();
2144 }
2145 else
2146 {
2147 /*
2148 * Register, memory.
2149 */
2150 if (pVCpu->iem.s.uVexLength == 0)
2151 {
2152 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2154 IEM_MC_LOCAL(RTUINT128U, uSrc);
2155
2156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2157 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2158 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2160
2161 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2162 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2163
2164 IEM_MC_ADVANCE_RIP_AND_FINISH();
2165 IEM_MC_END();
2166 }
2167 else
2168 {
2169 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171 IEM_MC_LOCAL(RTUINT256U, uSrc);
2172
2173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2174 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2175 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2176 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2177
2178 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2179 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 }
2185}
2186
2187/**
2188 * @opmnemonic udvexf30f28
2189 * @opcode 0x28
2190 * @oppfx 0xf3
2191 * @opunused vex.modrm
2192 * @opcpuid avx
2193 * @optest ->
2194 * @opdone
2195 */
2196
2197/**
2198 * @opmnemonic udvexf20f28
2199 * @opcode 0x28
2200 * @oppfx 0xf2
2201 * @opunused vex.modrm
2202 * @opcpuid avx
2203 * @optest ->
2204 * @opdone
2205 */
2206
2207/**
2208 * @opcode 0x29
2209 * @oppfx none
2210 * @opcpuid avx
2211 * @opgroup og_avx_pcksclr_datamove
2212 * @opxcpttype 1
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 * @note Almost identical to vmovapd.
2216 */
2217FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2218{
2219 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 if (IEM_IS_MODRM_REG_MODE(bRm))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2228 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2229
2230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2232 if (pVCpu->iem.s.uVexLength == 0)
2233 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2234 IEM_GET_MODRM_REG(pVCpu, bRm));
2235 else
2236 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2237 IEM_GET_MODRM_REG(pVCpu, bRm));
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 }
2241 else
2242 {
2243 /*
2244 * Register, memory.
2245 */
2246 if (pVCpu->iem.s.uVexLength == 0)
2247 {
2248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250 IEM_MC_LOCAL(RTUINT128U, uSrc);
2251
2252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2253 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2256
2257 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2258 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2259
2260 IEM_MC_ADVANCE_RIP_AND_FINISH();
2261 IEM_MC_END();
2262 }
2263 else
2264 {
2265 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2267 IEM_MC_LOCAL(RTUINT256U, uSrc);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2270 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2271 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2273
2274 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2275 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2276
2277 IEM_MC_ADVANCE_RIP_AND_FINISH();
2278 IEM_MC_END();
2279 }
2280 }
2281}
2282
2283/**
2284 * @opcode 0x29
2285 * @oppfx 0x66
2286 * @opcpuid avx
2287 * @opgroup og_avx_pcksclr_datamove
2288 * @opxcpttype 1
2289 * @optest op1=1 op2=2 -> op1=2
2290 * @optest op1=0 op2=-42 -> op1=-42
2291 * @note Almost identical to vmovaps
2292 */
2293FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2294{
2295 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2296 Assert(pVCpu->iem.s.uVexLength <= 1);
2297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2298 if (IEM_IS_MODRM_REG_MODE(bRm))
2299 {
2300 /*
2301 * Register, register.
2302 */
2303 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2305
2306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2307 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2308 if (pVCpu->iem.s.uVexLength == 0)
2309 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2310 IEM_GET_MODRM_REG(pVCpu, bRm));
2311 else
2312 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2313 IEM_GET_MODRM_REG(pVCpu, bRm));
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 if (pVCpu->iem.s.uVexLength == 0)
2323 {
2324 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2326 IEM_MC_LOCAL(RTUINT128U, uSrc);
2327
2328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2332
2333 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2334 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2335
2336 IEM_MC_ADVANCE_RIP_AND_FINISH();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343 IEM_MC_LOCAL(RTUINT256U, uSrc);
2344
2345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2346 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2349
2350 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2351 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2352
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 }
2356 }
2357}
2358
2359
2360/**
2361 * @opmnemonic udvexf30f29
2362 * @opcode 0x29
2363 * @oppfx 0xf3
2364 * @opunused vex.modrm
2365 * @opcpuid avx
2366 * @optest ->
2367 * @opdone
2368 */
2369
2370/**
2371 * @opmnemonic udvexf20f29
2372 * @opcode 0x29
2373 * @oppfx 0xf2
2374 * @opunused vex.modrm
2375 * @opcpuid avx
2376 * @optest ->
2377 * @opdone
2378 */
2379
2380
2381/** Opcode VEX.0F 0x2a - invalid */
2382/** Opcode VEX.66.0F 0x2a - invalid */
2383
2384
2385/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2386FNIEMOP_DEF(iemOp_vcvtsi2ss_Vss_Hss_Ey)
2387{
2388 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SS, vcvtsi2ss, Vps, Hps, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2389 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
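    /* VEX.W (effective in 64-bit mode only, see above) selects a 64-bit instead of a 32-bit integer source. */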
2391 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2392 {
2393 if (IEM_IS_MODRM_REG_MODE(bRm))
2394 {
2395 /* XMM, greg64 */
2396 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2397 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2398 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2399 IEM_MC_PREPARE_AVX_USAGE();
2400
2401 IEM_MC_LOCAL(X86XMMREG, uDst);
2402 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2403 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2404 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2405 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2406 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2407 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2408 puDst, puSrc1, pi64Src2);
2409 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2410 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2411 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2412 IEM_MC_ADVANCE_RIP_AND_FINISH();
2413 IEM_MC_END();
2414 }
2415 else
2416 {
2417 /* XMM, [mem64] */
2418 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2421 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2422 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2423 IEM_MC_PREPARE_AVX_USAGE();
2424
2425 IEM_MC_LOCAL(X86XMMREG, uDst);
2426 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2427 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2428 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2429 IEM_MC_LOCAL(int64_t, i64Src2);
2430 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2431 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2432 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2433 puDst, puSrc1, pi64Src2);
2434 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2435 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2436 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2437 IEM_MC_ADVANCE_RIP_AND_FINISH();
2438 IEM_MC_END();
2439 }
2440 }
2441 else
2442 {
2443 if (IEM_IS_MODRM_REG_MODE(bRm))
2444 {
2445 /* XMM, greg32 */
2446 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2447 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2448 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2449 IEM_MC_PREPARE_AVX_USAGE();
2450
2451 IEM_MC_LOCAL(X86XMMREG, uDst);
2452 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2453 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2454 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2455 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2456 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2457 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2458 puDst, puSrc1, pi32Src2);
2459 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2460 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2461 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2462 IEM_MC_ADVANCE_RIP_AND_FINISH();
2463 IEM_MC_END();
2464 }
2465 else
2466 {
2467 /* XMM, [mem32] */
2468 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2472 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2473 IEM_MC_PREPARE_AVX_USAGE();
2474
2475 IEM_MC_LOCAL(X86XMMREG, uDst);
2476 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2477 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2478 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2479 IEM_MC_LOCAL(int32_t, i32Src2);
2480 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2481 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2482 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2483 puDst, puSrc1, pi32Src2);
2484 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2485 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2486 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2487 IEM_MC_ADVANCE_RIP_AND_FINISH();
2488 IEM_MC_END();
2489 }
2490 }
2491}
2492
2493
2494/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2495FNIEMOP_DEF(iemOp_vcvtsi2sd_Vsd_Hsd_Ey)
2496{
2497 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SD, vcvtsi2sd, Vpd, Hpd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2498 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2501 {
2502 if (IEM_IS_MODRM_REG_MODE(bRm))
2503 {
2504 /* XMM, greg64 */
2505 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2506 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2507 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2508 IEM_MC_PREPARE_AVX_USAGE();
2509
2510 IEM_MC_LOCAL(X86XMMREG, uDst);
2511 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2512 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2513 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2514 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2515 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2516 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2517 puDst, puSrc1, pi64Src2);
2518 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2519 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2520 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2521 IEM_MC_ADVANCE_RIP_AND_FINISH();
2522 IEM_MC_END();
2523 }
2524 else
2525 {
2526 /* XMM, [mem64] */
2527 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2530 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2532 IEM_MC_PREPARE_AVX_USAGE();
2533
2534 IEM_MC_LOCAL(X86XMMREG, uDst);
2535 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2536 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2537 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2538 IEM_MC_LOCAL(int64_t, i64Src2);
2539 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2540 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2541 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2542 puDst, puSrc1, pi64Src2);
2543 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2544 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2545 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2546 IEM_MC_ADVANCE_RIP_AND_FINISH();
2547 IEM_MC_END();
2548 }
2549 }
2550 else
2551 {
2552 if (IEM_IS_MODRM_REG_MODE(bRm))
2553 {
2554 /* XMM, greg32 */
2555 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2556 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2558 IEM_MC_PREPARE_AVX_USAGE();
2559
2560 IEM_MC_LOCAL(X86XMMREG, uDst);
2561 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2562 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2563 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2564 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2565 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2566 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2567 puDst, puSrc1, pi32Src2);
2568 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2569 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2570 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2571 IEM_MC_ADVANCE_RIP_AND_FINISH();
2572 IEM_MC_END();
2573 }
2574 else
2575 {
2576 /* XMM, [mem32] */
2577 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2580 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2581 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2582 IEM_MC_PREPARE_AVX_USAGE();
2583
2584 IEM_MC_LOCAL(X86XMMREG, uDst);
2585 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2586 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2587 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2588 IEM_MC_LOCAL(int32_t, i32Src2);
2589 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2590 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2591 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2592 puDst, puSrc1, pi32Src2);
2593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2594 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2595 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2596 IEM_MC_ADVANCE_RIP_AND_FINISH();
2597 IEM_MC_END();
2598 }
2599 }
2600}
2601
2602
2603/**
2604 * @opcode 0x2b
2605 * @opcodesub !11 mr/reg
2606 * @oppfx none
2607 * @opcpuid avx
2608 * @opgroup og_avx_cachect
2609 * @opxcpttype 1
2610 * @optest op1=1 op2=2 -> op1=2
2611 * @optest op1=0 op2=-42 -> op1=-42
2612 * @note Identical implementation to vmovntpd
2613 */
2614FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2615{
2616 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2617 Assert(pVCpu->iem.s.uVexLength <= 1);
2618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2619 if (IEM_IS_MODRM_MEM_MODE(bRm))
2620 {
2621 /*
2622 * memory, register.
2623 */
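        /* The non-temporal hint is not modelled here; the store is performed as an ordinary aligned store. */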
2624 if (pVCpu->iem.s.uVexLength == 0)
2625 {
2626 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2627 IEM_MC_LOCAL(RTUINT128U, uSrc);
2628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2629
2630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2631 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2632 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2633 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2634
2635 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2636 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2637
2638 IEM_MC_ADVANCE_RIP_AND_FINISH();
2639 IEM_MC_END();
2640 }
2641 else
2642 {
2643 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2644 IEM_MC_LOCAL(RTUINT256U, uSrc);
2645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2646
2647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2648 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2649 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2650 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2651
2652 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2653 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658 }
2659 /* The register, register encoding is invalid. */
2660 else
2661 IEMOP_RAISE_INVALID_OPCODE_RET();
2662}
2663
2664/**
2665 * @opcode 0x2b
2666 * @opcodesub !11 mr/reg
2667 * @oppfx 0x66
2668 * @opcpuid avx
2669 * @opgroup og_avx_cachect
2670 * @opxcpttype 1
2671 * @optest op1=1 op2=2 -> op1=2
2672 * @optest op1=0 op2=-42 -> op1=-42
2673 * @note Identical implementation to vmovntps
2674 */
2675FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2676{
2677 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2678 Assert(pVCpu->iem.s.uVexLength <= 1);
2679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2680 if (IEM_IS_MODRM_MEM_MODE(bRm))
2681 {
2682 /*
2683 * memory, register.
2684 */
2685 if (pVCpu->iem.s.uVexLength == 0)
2686 {
2687 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2688 IEM_MC_LOCAL(RTUINT128U, uSrc);
2689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2690
2691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2692 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2694 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2695
2696 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2697 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2698
2699 IEM_MC_ADVANCE_RIP_AND_FINISH();
2700 IEM_MC_END();
2701 }
2702 else
2703 {
2704 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2705 IEM_MC_LOCAL(RTUINT256U, uSrc);
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2707
2708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2709 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2710 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2712
2713 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2714 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2715
2716 IEM_MC_ADVANCE_RIP_AND_FINISH();
2717 IEM_MC_END();
2718 }
2719 }
2720 /* The register, register encoding is invalid. */
2721 else
2722 IEMOP_RAISE_INVALID_OPCODE_RET();
2723}
2724
2725/**
2726 * @opmnemonic udvexf30f2b
2727 * @opcode 0x2b
2728 * @oppfx 0xf3
2729 * @opunused vex.modrm
2730 * @opcpuid avx
2731 * @optest ->
2732 * @opdone
2733 */
2734
2735/**
2736 * @opmnemonic udvexf20f2b
2737 * @opcode 0x2b
2738 * @oppfx 0xf2
2739 * @opunused vex.modrm
2740 * @opcpuid avx
2741 * @optest ->
2742 * @opdone
2743 */
2744
2745
2746/* Opcode VEX.0F 0x2c - invalid */
2747/* Opcode VEX.66.0F 0x2c - invalid */
2748
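/**
 * Common body for vcvttss2si and vcvtss2si: VEX.W selects a 64-bit or 32-bit
 * destination GPR, each form taking the float32 source from an XMM register
 * or from a 32-bit memory operand.
 */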
2749#define IEMOP_VCVTXSS2SI_Gy_Wss_BODY(a_Instr) \
2750 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2752 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2753 { \
2754 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2755 { \
2756 /* greg64, XMM */ \
2757 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2758 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2759 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2760 IEM_MC_PREPARE_AVX_USAGE(); \
2761 IEM_MC_LOCAL( int64_t, i64Dst); \
2762 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2763 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2764 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2765 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2766 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2767 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2768 pi64Dst, pr32Src); \
2769 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2770 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2771 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2772 IEM_MC_END(); \
2773 } \
2774 else \
2775 { \
2776 /* greg64, [mem32] */ \
2777 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2780 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2781 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2782 IEM_MC_PREPARE_AVX_USAGE(); \
2783 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2784 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2785 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2786 IEM_MC_LOCAL( int64_t, i64Dst); \
2787 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2788 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2789 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2790 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2791 pi64Dst, pr32Src); \
2792 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2793 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2795 IEM_MC_END(); \
2796 } \
2797 } \
2798 else \
2799 { \
2800 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2801 { \
2802 /* greg32, XMM */ \
2803 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2804 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2805 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2806 IEM_MC_PREPARE_AVX_USAGE(); \
2807 IEM_MC_LOCAL( int32_t, i32Dst); \
2808 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2809 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2810 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2811 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2812 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2813 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2814 pi32Dst, pr32Src); \
2815 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2816 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2817 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2818 IEM_MC_END(); \
2819 } \
2820 else \
2821 { \
2822 /* greg32, [mem32] */ \
2823 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2826 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2827 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2828 IEM_MC_PREPARE_AVX_USAGE(); \
2829 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2830 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2831 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2832 IEM_MC_LOCAL( int32_t, i32Dst); \
2833 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2834 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2835 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2836 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2837 pi32Dst, pr32Src); \
2838 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2839 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2840 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2841 IEM_MC_END(); \
2842 } \
2843 } \
2844 (void)0
2845
2846
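/**
 * Common body for vcvttsd2si and vcvtsd2si: same structure as the ss variant
 * above, but with a float64 (XMM register or 64-bit memory) source.
 */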
2847 #define IEMOP_VCVTXSD2SI_Gy_Wsd_BODY(a_Instr) \
2848 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2850 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2851 { \
2852 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2853 { \
2854 /* greg64, XMM */ \
2855 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2856 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2857 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2858 IEM_MC_PREPARE_AVX_USAGE(); \
2859 IEM_MC_LOCAL( int64_t, i64Dst); \
2860 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2861 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2862 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2863 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2864 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2865 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2866 pi64Dst, pr64Src); \
2867 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2868 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2869 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2870 IEM_MC_END(); \
2871 } \
2872 else \
2873 { \
2874 /* greg64, [mem64] */ \
2875 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2876 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2878 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2879 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2880 IEM_MC_PREPARE_AVX_USAGE(); \
2881 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2882 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2883 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2884 IEM_MC_LOCAL( int64_t, i64Dst); \
2885 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2886 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2887 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2888 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2889 pi64Dst, pr64Src); \
2890 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2891 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2892 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2893 IEM_MC_END(); \
2894 } \
2895 } \
2896 else \
2897 { \
2898 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2899 { \
2900 /* greg32, XMM */ \
2901 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2902 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2903 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2904 IEM_MC_PREPARE_AVX_USAGE(); \
2905 IEM_MC_LOCAL( int32_t, i32Dst); \
2906 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2907 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2908 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2909 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2910 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2911 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2912 pi32Dst, pr64Src); \
2913 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2914 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2915 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2916 IEM_MC_END(); \
2917 } \
2918 else \
2919 { \
2920 /* greg32, [mem64] */ \
2921 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2924 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2925 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2926 IEM_MC_PREPARE_AVX_USAGE(); \
2927 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2928 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2929 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2930 IEM_MC_LOCAL( int32_t, i32Dst); \
2931 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2932 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2933 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2934 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2935 pi32Dst, pr64Src); \
2936 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2937 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2938 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2939 IEM_MC_END(); \
2940 } \
2941 } \
2942 (void)0
2943
2944
2945/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2946FNIEMOP_DEF(iemOp_vcvttss2si_Gy_Wss)
2947{
2948 IEMOP_MNEMONIC2(VEX_RM, VCVTTSS2SI, vcvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2949 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvttss2si);
2950}
2951
2952
2953/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2954FNIEMOP_DEF(iemOp_vcvttsd2si_Gy_Wsd)
2955{
2956 IEMOP_MNEMONIC2(VEX_RM, VCVTTSD2SI, vcvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2957 IEMOP_VCVTXSD2SI_Gy_Wsd_BODY( vcvttsd2si);
2958}
2959
2960
2961/* Opcode VEX.0F 0x2d - invalid */
2962/* Opcode VEX.66.0F 0x2d - invalid */
2963
2964
2965/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2966FNIEMOP_DEF(iemOp_vcvtss2si_Gy_Wss)
2967{
2968 IEMOP_MNEMONIC2(VEX_RM, VCVTSS2SI, vcvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2969 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvtss2si);
2970}
2971
2972
2973/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2974FNIEMOP_DEF(iemOp_vcvtsd2si_Gy_Wsd)
2975{
2976 IEMOP_MNEMONIC2(VEX_RM, VCVTSD2SI, vcvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2977 IEMOP_VCVTXSD2SI_Gy_Wsd_BODY( vcvtsd2si);
2978}
2979
2980
2981
2982/**
2983 * @opcode 0x2e
2984 * @oppfx none
2985 * @opflmodify cf,pf,af,zf,sf,of
2986 * @opflclear af,sf,of
2987 */
2988FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2989{
2990 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2992 if (IEM_IS_MODRM_REG_MODE(bRm))
2993 {
2994 /*
2995 * Register, register.
2996 */
2997 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2998 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2999 IEM_MC_LOCAL(uint32_t, fEFlags);
3000 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3001 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3002 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3003 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3004 IEM_MC_PREPARE_AVX_USAGE();
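        /* EFLAGS result (ZF,PF,CF): greater = 0,0,0; less = 0,0,1; equal = 1,0,0; unordered = 1,1,1. AF/SF/OF are cleared. */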
3005 IEM_MC_FETCH_EFLAGS(fEFlags);
3006 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3007 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
3008 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
3009 pEFlags, uSrc1, uSrc2);
3010 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3011 IEM_MC_COMMIT_EFLAGS(fEFlags);
3012
3013 IEM_MC_ADVANCE_RIP_AND_FINISH();
3014 IEM_MC_END();
3015 }
3016 else
3017 {
3018 /*
3019 * Register, memory.
3020 */
3021 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3022 IEM_MC_LOCAL(uint32_t, fEFlags);
3023 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3024 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3025 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3027
3028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3029 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3031 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3032
3033 IEM_MC_PREPARE_AVX_USAGE();
3034 IEM_MC_FETCH_EFLAGS(fEFlags);
3035 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3036 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
3037 pEFlags, uSrc1, uSrc2);
3038 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3039 IEM_MC_COMMIT_EFLAGS(fEFlags);
3040
3041 IEM_MC_ADVANCE_RIP_AND_FINISH();
3042 IEM_MC_END();
3043 }
3044}
3045
3046
3047/**
3048 * @opcode 0x2e
3049 * @oppfx 0x66
3050 * @opflmodify cf,pf,af,zf,sf,of
3051 * @opflclear af,sf,of
3052 */
3053FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
3054{
3055 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3057 if (IEM_IS_MODRM_REG_MODE(bRm))
3058 {
3059 /*
3060 * Register, register.
3061 */
3062 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3063 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3064 IEM_MC_LOCAL(uint32_t, fEFlags);
3065 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3066 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3067 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3068 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3069 IEM_MC_PREPARE_AVX_USAGE();
3070 IEM_MC_FETCH_EFLAGS(fEFlags);
3071 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3072 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3073 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3074 pEFlags, uSrc1, uSrc2);
3075 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3076 IEM_MC_COMMIT_EFLAGS(fEFlags);
3077
3078 IEM_MC_ADVANCE_RIP_AND_FINISH();
3079 IEM_MC_END();
3080 }
3081 else
3082 {
3083 /*
3084 * Register, memory.
3085 */
3086 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3087 IEM_MC_LOCAL(uint32_t, fEFlags);
3088 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3089 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3090 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3092
3093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3094 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3095 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3096 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3097
3098 IEM_MC_PREPARE_AVX_USAGE();
3099 IEM_MC_FETCH_EFLAGS(fEFlags);
3100 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3101 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3102 pEFlags, uSrc1, uSrc2);
3103 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3104 IEM_MC_COMMIT_EFLAGS(fEFlags);
3105
3106 IEM_MC_ADVANCE_RIP_AND_FINISH();
3107 IEM_MC_END();
3108 }
3109}
3110
3111
3112/* Opcode VEX.F3.0F 0x2e - invalid */
3113/* Opcode VEX.F2.0F 0x2e - invalid */
3114
3115/**
3116 * @opcode 0x2f
3117 * @oppfx none
3118 * @opflmodify cf,pf,af,zf,sf,of
3119 * @opflclear af,sf,of
3120 */
3121FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
3122{
3123 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if (IEM_IS_MODRM_REG_MODE(bRm))
3126 {
3127 /*
3128 * Register, register.
3129 */
3130 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3131 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3132 IEM_MC_LOCAL(uint32_t, fEFlags);
3133 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3134 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3135 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3136 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3137 IEM_MC_PREPARE_AVX_USAGE();
3138 IEM_MC_FETCH_EFLAGS(fEFlags);
3139 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3140 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
3141 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3142 pEFlags, uSrc1, uSrc2);
3143 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3144 IEM_MC_COMMIT_EFLAGS(fEFlags);
3145
3146 IEM_MC_ADVANCE_RIP_AND_FINISH();
3147 IEM_MC_END();
3148 }
3149 else
3150 {
3151 /*
3152 * Register, memory.
3153 */
3154 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3155 IEM_MC_LOCAL(uint32_t, fEFlags);
3156 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3157 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3158 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3160
3161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3162 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3163 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3164 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3165
3166 IEM_MC_PREPARE_AVX_USAGE();
3167 IEM_MC_FETCH_EFLAGS(fEFlags);
3168 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3169 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3170 pEFlags, uSrc1, uSrc2);
3171 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3172 IEM_MC_COMMIT_EFLAGS(fEFlags);
3173
3174 IEM_MC_ADVANCE_RIP_AND_FINISH();
3175 IEM_MC_END();
3176 }
3177}
3178
3179
3180/**
3181 * @opcode 0x2f
3182 * @oppfx 0x66
3183 * @opflmodify cf,pf,af,zf,sf,of
3184 * @opflclear af,sf,of
3185 */
3186FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
3187{
3188 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3190 if (IEM_IS_MODRM_REG_MODE(bRm))
3191 {
3192 /*
3193 * Register, register.
3194 */
3195 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3196 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3197 IEM_MC_LOCAL(uint32_t, fEFlags);
3198 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3199 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3200 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3201 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3202 IEM_MC_PREPARE_AVX_USAGE();
3203 IEM_MC_FETCH_EFLAGS(fEFlags);
3204 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3205 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3206 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3207 pEFlags, uSrc1, uSrc2);
3208 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3209 IEM_MC_COMMIT_EFLAGS(fEFlags);
3210
3211 IEM_MC_ADVANCE_RIP_AND_FINISH();
3212 IEM_MC_END();
3213 }
3214 else
3215 {
3216 /*
3217 * Register, memory.
3218 */
3219 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3220 IEM_MC_LOCAL(uint32_t, fEFlags);
3221 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3222 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3223 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3225
3226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3227 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3228 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3229 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3230
3231 IEM_MC_PREPARE_AVX_USAGE();
3232 IEM_MC_FETCH_EFLAGS(fEFlags);
3233 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3234 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3235 pEFlags, uSrc1, uSrc2);
3236 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3237 IEM_MC_COMMIT_EFLAGS(fEFlags);
3238
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242}
3243
3244
3245/* Opcode VEX.F3.0F 0x2f - invalid */
3246/* Opcode VEX.F2.0F 0x2f - invalid */
3247
3248/* Opcode VEX.0F 0x30 - invalid */
3249/* Opcode VEX.0F 0x31 - invalid */
3250/* Opcode VEX.0F 0x32 - invalid */
3251/* Opcode VEX.0F 0x33 - invalid */
3252/* Opcode VEX.0F 0x34 - invalid */
3253/* Opcode VEX.0F 0x35 - invalid */
3254/* Opcode VEX.0F 0x36 - invalid */
3255/* Opcode VEX.0F 0x37 - invalid */
3256/* Opcode VEX.0F 0x38 - invalid */
3257/* Opcode VEX.0F 0x39 - invalid */
3258/* Opcode VEX.0F 0x3a - invalid */
3259/* Opcode VEX.0F 0x3b - invalid */
3260/* Opcode VEX.0F 0x3c - invalid */
3261/* Opcode VEX.0F 0x3d - invalid */
3262/* Opcode VEX.0F 0x3e - invalid */
3263/* Opcode VEX.0F 0x3f - invalid */
3264/* Opcode VEX.0F 0x40 - invalid */
3265/* Opcode VEX.0F 0x41 - invalid */
3266/* Opcode VEX.0F 0x42 - invalid */
3267/* Opcode VEX.0F 0x43 - invalid */
3268/* Opcode VEX.0F 0x44 - invalid */
3269/* Opcode VEX.0F 0x45 - invalid */
3270/* Opcode VEX.0F 0x46 - invalid */
3271/* Opcode VEX.0F 0x47 - invalid */
3272/* Opcode VEX.0F 0x48 - invalid */
3273/* Opcode VEX.0F 0x49 - invalid */
3274/* Opcode VEX.0F 0x4a - invalid */
3275/* Opcode VEX.0F 0x4b - invalid */
3276/* Opcode VEX.0F 0x4c - invalid */
3277/* Opcode VEX.0F 0x4d - invalid */
3278/* Opcode VEX.0F 0x4e - invalid */
3279/* Opcode VEX.0F 0x4f - invalid */
3280
3281
3282/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
3283FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
3284{
3285 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3287 if (IEM_IS_MODRM_REG_MODE(bRm))
3288 {
3289 /*
3290 * Register, register.
3291 */
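        /* vmovmskps: gather the sign bit of each packed single into the low bits of the destination GPR, zeroing the rest. */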
3292 if (pVCpu->iem.s.uVexLength == 0)
3293 {
3294 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3295 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3296 IEM_MC_LOCAL(uint8_t, u8Dst);
3297 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3298 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3299 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3300 IEM_MC_PREPARE_AVX_USAGE();
3301 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3302 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
3303 pu8Dst, puSrc);
3304 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3305 IEM_MC_ADVANCE_RIP_AND_FINISH();
3306 IEM_MC_END();
3307 }
3308 else
3309 {
3310 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3311 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3312 IEM_MC_LOCAL(uint8_t, u8Dst);
3313 IEM_MC_LOCAL(RTUINT256U, uSrc);
3314 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3315 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3316
3317 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3318 IEM_MC_PREPARE_AVX_USAGE();
3319 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3320 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
3321 pu8Dst, puSrc);
3322 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3323 IEM_MC_ADVANCE_RIP_AND_FINISH();
3324 IEM_MC_END();
3325 }
3326 }
3327 /* No memory operand. */
3328 else
3329 IEMOP_RAISE_INVALID_OPCODE_RET();
3330}
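
/*
 * Sketch of the 128-bit movmskps semantics the workers above provide: the
 * sign bit of each packed single is gathered into the low bits of the
 * destination.  (vmovmskpd below does the same with the two double-precision
 * sign bits.)  Illustrative only, hypothetical name:
 */
static void sketchMovmskpsU128(uint8_t *pu8Dst, PCRTUINT128U puSrc)
{
    uint8_t fMask = 0;
    for (unsigned i = 0; i < RT_ELEMENTS(puSrc->au32); i++)
        fMask |= (uint8_t)((puSrc->au32[i] >> 31) & 1) << i; /* sign bit of lane i */
    *pu8Dst = fMask;
}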
3331
3332
3333/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
3334FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
3335{
3336    IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, 0); /* VEX.L=1 is valid and handled below, so no L-zero hint */
3337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3338 if (IEM_IS_MODRM_REG_MODE(bRm))
3339 {
3340 /*
3341 * Register, register.
3342 */
3343 if (pVCpu->iem.s.uVexLength == 0)
3344 {
3345 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3346 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3347 IEM_MC_LOCAL(uint8_t, u8Dst);
3348 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3349 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3350 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3351 IEM_MC_PREPARE_AVX_USAGE();
3352 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3353 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
3354 pu8Dst, puSrc);
3355 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3356 IEM_MC_ADVANCE_RIP_AND_FINISH();
3357 IEM_MC_END();
3358 }
3359 else
3360 {
3361 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3362 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3363 IEM_MC_LOCAL(uint8_t, u8Dst);
3364 IEM_MC_LOCAL(RTUINT256U, uSrc);
3365 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3366 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3367
3368 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3369 IEM_MC_PREPARE_AVX_USAGE();
3370 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3371 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
3372 pu8Dst, puSrc);
3373 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3374 IEM_MC_ADVANCE_RIP_AND_FINISH();
3375 IEM_MC_END();
3376 }
3377 }
3378 /* No memory operand. */
3379 else
3380 IEMOP_RAISE_INVALID_OPCODE_RET();
3381}
3382
3383
3384/* Opcode VEX.F3.0F 0x50 - invalid */
3385/* Opcode VEX.F2.0F 0x50 - invalid */
3386
3387/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
3388FNIEMOP_DEF(iemOp_vsqrtps_Vps_Wps)
3389{
3390 IEMOP_MNEMONIC2(VEX_RM, VSQRTPS, vsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3391 IEMOPMEDIAF2_INIT_VARS( vsqrtps);
3392 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3393}
3394
3395
3396/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
3397FNIEMOP_DEF(iemOp_vsqrtpd_Vpd_Wpd)
3398{
3399 IEMOP_MNEMONIC2(VEX_RM, VSQRTPD, vsqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3400 IEMOPMEDIAF2_INIT_VARS( vsqrtpd);
3401 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3402}
3403
3404
3405/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
3406FNIEMOP_DEF(iemOp_vsqrtss_Vss_Hss_Wss)
3407{
3408 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSS, vsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3409 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3410 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback));
3411}
3412
3413
3414/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
3415FNIEMOP_DEF(iemOp_vsqrtsd_Vsd_Hsd_Wsd)
3416{
3417 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSD, vsqrtsd, Vps, Hps, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3418 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3419 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback));
3420}
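
/*
 * The scalar forms above (vsqrtss/vsqrtsd, and the vadd/vmul/vsub/vmin/vdiv/
 * vmax scalar variants further down) all follow the same merge pattern: the
 * low element receives the result, the remaining bits of the 128-bit lane
 * come from the first source (H), and bits 255:128 are zeroed.  Minimal
 * sketch of the 32-bit merge, hypothetical name:
 */
static void sketchScalarMergeU32(PX86XMMREG pDst, PCX86XMMREG pSrc1, uint32_t uResult)
{
    *pDst = *pSrc1;          /* bits 127:32 are taken from the first source */
    pDst->au32[0] = uResult; /* bits 31:0 receive the scalar result */
}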
3421
3422
3423/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
3424FNIEMOP_DEF(iemOp_vrsqrtps_Vps_Wps)
3425{
3426 IEMOP_MNEMONIC2(VEX_RM, VRSQRTPS, vrsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3427 IEMOPMEDIAF2_INIT_VARS( vrsqrtps);
3428 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3429}
3430
3431
3432/* Opcode VEX.66.0F 0x52 - invalid */
3433
3434
3435/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
3436FNIEMOP_DEF(iemOp_vrsqrtss_Vss_Hss_Wss)
3437{
3438 IEMOP_MNEMONIC3(VEX_RVM, VRSQRTSS, vrsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3439 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3440 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback));
3441}
3442
3443
3444/* Opcode VEX.F2.0F 0x52 - invalid */
3445
3446
3447/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
3448FNIEMOP_DEF(iemOp_vrcpps_Vps_Wps)
3449{
3450 IEMOP_MNEMONIC2(VEX_RM, VRCPPS, vrcpps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3451 IEMOPMEDIAF2_INIT_VARS( vrcpps);
3452 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3453}
3454
3455
3456/* Opcode VEX.66.0F 0x53 - invalid */
3457
3458
3459/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
3460FNIEMOP_DEF(iemOp_vrcpss_Vss_Hss_Wss)
3461{
3462 IEMOP_MNEMONIC3(VEX_RVM, VRCPSS, vrcpss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3463 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3464 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback));
3465}
3466
3467
3468/* Opcode VEX.F2.0F 0x53 - invalid */
3469
3470
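/* Note: the packed bitwise ops below (vandps/vandpd, vandnps/vandnpd,
   vorps/vorpd, vxorps/vxorpd) all forward to the integer vpand/vpandn/vpor/
   vpxor workers, since bitwise logic is independent of the packed element
   type. */
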
3471/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
3472FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
3473{
3474 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3475 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3476 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3477}
3478
3479
3480/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
3481FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
3482{
3483 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3484 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3485 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3486}
3487
3488
3489/* Opcode VEX.F3.0F 0x54 - invalid */
3490/* Opcode VEX.F2.0F 0x54 - invalid */
3491
3492
3493/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
3494FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
3495{
3496 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3497 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3498 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3499}
3500
3501
3502/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
3503FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
3504{
3505 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3506 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3507 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3508}
3509
3510
3511/* Opcode VEX.F3.0F 0x55 - invalid */
3512/* Opcode VEX.F2.0F 0x55 - invalid */
3513
3514/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
3515FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
3516{
3517 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3518 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3519 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3520}
3521
3522
3523/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
3524FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
3525{
3526 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3527 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3528 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3529}
3530
3531
3532/* Opcode VEX.F3.0F 0x56 - invalid */
3533/* Opcode VEX.F2.0F 0x56 - invalid */
3534
3535
3536/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
3537FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
3538{
3539 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3540 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3541 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3542}
3543
3544
3545/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
3546FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
3547{
3548 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3549 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3550 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3551}
3552
3553
3554/* Opcode VEX.F3.0F 0x57 - invalid */
3555/* Opcode VEX.F2.0F 0x57 - invalid */
3556
3557
3558/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
3559FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
3560{
3561 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3562 IEMOPMEDIAF3_INIT_VARS( vaddps);
3563 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3564}
3565
3566
3567/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
3568FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
3569{
3570 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3571 IEMOPMEDIAF3_INIT_VARS( vaddpd);
3572 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3573}
3574
3575
3576/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
3577FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
3578{
3579 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3580 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3581 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
3582}
3583
3584
3585/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
3586FNIEMOP_DEF(iemOp_vaddsd_Vsd_Hsd_Wsd)
3587{
3588 IEMOP_MNEMONIC3(VEX_RVM, VADDSD, vaddsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3589 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3590 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback));
3591}
3592
3593
3594/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
3595FNIEMOP_DEF(iemOp_vmulps_Vps_Hps_Wps)
3596{
3597 IEMOP_MNEMONIC3(VEX_RVM, VMULPS, vmulps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3598 IEMOPMEDIAF3_INIT_VARS( vmulps);
3599 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3600}
3601
3602
3603/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
3604FNIEMOP_DEF(iemOp_vmulpd_Vpd_Hpd_Wpd)
3605{
3606 IEMOP_MNEMONIC3(VEX_RVM, VMULPD, vmulpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3607 IEMOPMEDIAF3_INIT_VARS( vmulpd);
3608 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3609}
3610
3611
3612/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
3613FNIEMOP_DEF(iemOp_vmulss_Vss_Hss_Wss)
3614{
3615 IEMOP_MNEMONIC3(VEX_RVM, VMULSS, vmulss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3616 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3617 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback));
3618}
3619
3620
3621/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
3622FNIEMOP_DEF(iemOp_vmulsd_Vsd_Hsd_Wsd)
3623{
3624 IEMOP_MNEMONIC3(VEX_RVM, VMULSD, vmulsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3625 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3626 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback));
3627}
3628
3629
3630/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
3631FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
3632/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
3633FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
3634/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
3635FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
3636/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
3637FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
3638
3639/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
3640FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
3641/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
3642FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
3643/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
3644FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
3645/* Opcode VEX.F2.0F 0x5b - invalid */
3646
3647
3648/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
3649FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
3650{
3651 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3652 IEMOPMEDIAF3_INIT_VARS( vsubps);
3653 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3654}
3655
3656
3657/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
3658FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
3659{
3660 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3661 IEMOPMEDIAF3_INIT_VARS( vsubpd);
3662 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3663}
3664
3665
3666/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
3667FNIEMOP_DEF(iemOp_vsubss_Vss_Hss_Wss)
3668{
3669 IEMOP_MNEMONIC3(VEX_RVM, VSUBSS, vsubss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3670 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3671 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback));
3672}
3673
3674
3675/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
3676FNIEMOP_DEF(iemOp_vsubsd_Vsd_Hsd_Wsd)
3677{
3678 IEMOP_MNEMONIC3(VEX_RVM, VSUBSD, vsubsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3679 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3680 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback));
3681}
3682
3683
3684/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
3685FNIEMOP_DEF(iemOp_vminps_Vps_Hps_Wps)
3686{
3687 IEMOP_MNEMONIC3(VEX_RVM, VMINPS, vminps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3688 IEMOPMEDIAF3_INIT_VARS( vminps);
3689 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3690}
3691
3692
3693/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
3694FNIEMOP_DEF(iemOp_vminpd_Vpd_Hpd_Wpd)
3695{
3696 IEMOP_MNEMONIC3(VEX_RVM, VMINPD, vminpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3697 IEMOPMEDIAF3_INIT_VARS( vminpd);
3698 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3699}
3700
3701
3702/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
3703FNIEMOP_DEF(iemOp_vminss_Vss_Hss_Wss)
3704{
3705 IEMOP_MNEMONIC3(VEX_RVM, VMINSS, vminss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3706 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3707 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback));
3708}
3709
3710
3711/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
3712FNIEMOP_DEF(iemOp_vminsd_Vsd_Hsd_Wsd)
3713{
3714 IEMOP_MNEMONIC3(VEX_RVM, VMINSD, vminsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3715 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3716 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback));
3717}
3718
3719
3720/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
3721FNIEMOP_DEF(iemOp_vdivps_Vps_Hps_Wps)
3722{
3723 IEMOP_MNEMONIC3(VEX_RVM, VDIVPS, vdivps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3724 IEMOPMEDIAF3_INIT_VARS( vdivps);
3725 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3726}
3727
3728
3729/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
3730FNIEMOP_DEF(iemOp_vdivpd_Vpd_Hpd_Wpd)
3731{
3732 IEMOP_MNEMONIC3(VEX_RVM, VDIVPD, vdivpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3733 IEMOPMEDIAF3_INIT_VARS( vdivpd);
3734 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3735}
3736
3737
3738/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
3739FNIEMOP_DEF(iemOp_vdivss_Vss_Hss_Wss)
3740{
3741 IEMOP_MNEMONIC3(VEX_RVM, VDIVSS, vdivss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3742 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3743 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback));
3744}
3745
3746
3747/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
3748FNIEMOP_DEF(iemOp_vdivsd_Vsd_Hsd_Wsd)
3749{
3750 IEMOP_MNEMONIC3(VEX_RVM, VDIVSD, vdivsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3751 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3752 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback));
3753}
3754
3755
3756/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
3757FNIEMOP_DEF(iemOp_vmaxps_Vps_Hps_Wps)
3758{
3759 IEMOP_MNEMONIC3(VEX_RVM, VMAXPS, vmaxps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3760 IEMOPMEDIAF3_INIT_VARS( vmaxps);
3761 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3762}
3763
3764
3765/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
3766FNIEMOP_DEF(iemOp_vmaxpd_Vpd_Hpd_Wpd)
3767{
3768 IEMOP_MNEMONIC3(VEX_RVM, VMAXPD, vmaxpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3769 IEMOPMEDIAF3_INIT_VARS( vmaxpd);
3770 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3771}
3772
3773
3774/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
3775FNIEMOP_DEF(iemOp_vmaxss_Vss_Hss_Wss)
3776{
3777 IEMOP_MNEMONIC3(VEX_RVM, VMAXSS, vmaxss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3778 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3779 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback));
3780}
3781
3782
3783/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
3784FNIEMOP_DEF(iemOp_vmaxsd_Vsd_Hsd_Wsd)
3785{
3786 IEMOP_MNEMONIC3(VEX_RVM, VMAXSD, vmaxsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3787 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3788 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback));
3789}
3790
3791
3792/* Opcode VEX.0F 0x60 - invalid */
3793
3794
3795/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
3796FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
3797{
3798 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3799 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
3800 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3801}
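
/*
 * Sketch of the byte interleave the vpunpcklbw workers perform per 128-bit
 * lane: the low eight bytes of the two sources are zipped together.
 * Illustrative only, hypothetical name:
 */
static void sketchPunpcklbwU128(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
{
    RTUINT128U const uA = *puSrc1;
    RTUINT128U const uB = *puSrc2; /* copy first so puDst may alias a source */
    for (unsigned i = 0; i < 8; i++)
    {
        puDst->au8[2 * i]     = uA.au8[i];
        puDst->au8[2 * i + 1] = uB.au8[i];
    }
}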
3802
3803
3804/* Opcode VEX.F3.0F 0x60 - invalid */
3805
3806
3807/* Opcode VEX.0F 0x61 - invalid */
3808
3809
3810/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
3811FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
3812{
3813 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3814 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
3815 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3816}
3817
3818
3819/* Opcode VEX.F3.0F 0x61 - invalid */
3820
3821
3822/* Opcode VEX.0F 0x62 - invalid */
3823
3824/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
3825FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
3826{
3827 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3828 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
3829 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3830}
3831
3832
3833/* Opcode VEX.F3.0F 0x62 - invalid */
3834
3835
3836
3837/* Opcode VEX.0F 0x63 - invalid */
3838
3839
3840/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
3841FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
3842{
3843 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3844 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
3845 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3846}
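
/*
 * The pack family narrows lanes with saturation; for vpacksswb each signed
 * 16-bit lane is clamped to the int8_t range.  Saturation helper sketch,
 * hypothetical name:
 */
static int8_t sketchSatI16ToI8(int16_t i16)
{
    return i16 < INT8_MIN ? INT8_MIN : i16 > INT8_MAX ? INT8_MAX : (int8_t)i16;
}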
3847
3848
3849/* Opcode VEX.F3.0F 0x63 - invalid */
3850
3851/* Opcode VEX.0F 0x64 - invalid */
3852
3853
3854/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
3855FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
3856{
3857 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3858 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
3859 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3860}
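
/*
 * The vpcmpgt/vpcmpeq instructions produce all-ones/all-zero element masks
 * rather than flag results.  Per-byte sketch for vpcmpgtb, hypothetical name:
 */
static uint8_t sketchCmpGtB(int8_t i8Dst, int8_t i8Src)
{
    return i8Dst > i8Src ? UINT8_C(0xff) : 0; /* all-ones when the signed compare is true */
}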
3861
3862
3863/* Opcode VEX.F3.0F 0x64 - invalid */
3864
3865/* Opcode VEX.0F 0x65 - invalid */
3866
3867
3868/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3869FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3870{
3871 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3872 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
3873 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3874}
3875
3876
3877/* Opcode VEX.F3.0F 0x65 - invalid */
3878
3879/* Opcode VEX.0F 0x66 - invalid */
3880
3881
3882/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3883FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3884{
3885 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3886 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3887 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3888}
3889
3890
3891/* Opcode VEX.F3.0F 0x66 - invalid */
3892
3893/* Opcode VEX.0F 0x67 - invalid */
3894
3895
3896/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3897FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3898{
3899 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3900 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3901 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3902}
3903
3904
3905/* Opcode VEX.F3.0F 0x67 - invalid */
3906
3907
3908///**
3909// * Common worker for SSE2 instructions on the form:
3910// * pxxxx xmm1, xmm2/mem128
3911// *
3912// * The 2nd operand is the second half of a register, which in the memory case
3913// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3914// * where it may read the full 128 bits or only the upper 64 bits.
3915// *
3916// * Exceptions type 4.
3917// */
3918//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3919//{
3920// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3921// if (IEM_IS_MODRM_REG_MODE(bRm))
3922// {
3923// /*
3924// * Register, register.
3925// */
3926// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3927// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3928// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3929// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3930// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3931// IEM_MC_PREPARE_SSE_USAGE();
3932// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3933// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3934// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3935// IEM_MC_ADVANCE_RIP_AND_FINISH();
3936// IEM_MC_END();
3937// }
3938// else
3939// {
3940// /*
3941// * Register, memory.
3942// */
3943// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3944// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3945// IEM_MC_LOCAL(RTUINT128U, uSrc);
3946// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3947// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3948//
3949// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3950// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3951// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3952//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3953//
3954// IEM_MC_PREPARE_SSE_USAGE();
3955// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3956// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3957//
3958// IEM_MC_ADVANCE_RIP_AND_FINISH();
3959// IEM_MC_END();
3960// }
3961// return VINF_SUCCESS;
3962//}
3963
3964
3965/* Opcode VEX.0F 0x68 - invalid */
3966
3967/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3968FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3969{
3970 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3971 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3972 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3973}
3974
3975
3976/* Opcode VEX.F3.0F 0x68 - invalid */
3977
3978
3979/* Opcode VEX.0F 0x69 - invalid */
3980
3981
3982/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3983FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3984{
3985 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3986 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3987 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3988}
3989
3990
3991/* Opcode VEX.F3.0F 0x69 - invalid */
3992
3993
3994/* Opcode VEX.0F 0x6a - invalid */
3995
3996
3997/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3998FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3999{
4000 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4001 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
4002 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4003}
4004
4005
4006/* Opcode VEX.F3.0F 0x6a - invalid */
4007
4008
4009/* Opcode VEX.0F 0x6b - invalid */
4010
4011
4012/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
4013FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
4014{
4015 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4016 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
4017 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4018}
4019
4020
4021/* Opcode VEX.F3.0F 0x6b - invalid */
4022
4023
4024/* Opcode VEX.0F 0x6c - invalid */
4025
4026
4027/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
4028FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
4029{
4030 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4031 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
4032 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4033}
4034
4035
4036/* Opcode VEX.F3.0F 0x6c - invalid */
4037/* Opcode VEX.F2.0F 0x6c - invalid */
4038
4039
4040/* Opcode VEX.0F 0x6d - invalid */
4041
4042
4043/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
4044FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
4045{
4046 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4047 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
4048 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4049}
4050
4051
4052/* Opcode VEX.F3.0F 0x6d - invalid */
4053
4054
4055/* Opcode VEX.0F 0x6e - invalid */
4056
4057FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
4058{
4059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4060 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4061 {
4062 /**
4063 * @opcode 0x6e
4064 * @opcodesub rex.w=1
4065 * @oppfx 0x66
4066 * @opcpuid avx
4067 * @opgroup og_avx_simdint_datamov
4068 * @opxcpttype 5
4069 * @optest 64-bit / op1=1 op2=2 -> op1=2
4070 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4071 */
4072 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4073 if (IEM_IS_MODRM_REG_MODE(bRm))
4074 {
4075 /* XMM, greg64 */
4076 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4077 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4078 IEM_MC_LOCAL(uint64_t, u64Tmp);
4079
4080 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4081 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4082
4083 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4084 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4085
4086 IEM_MC_ADVANCE_RIP_AND_FINISH();
4087 IEM_MC_END();
4088 }
4089 else
4090 {
4091 /* XMM, [mem64] */
4092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4094 IEM_MC_LOCAL(uint64_t, u64Tmp);
4095
4096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4097 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4100
4101 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4102 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4103
4104 IEM_MC_ADVANCE_RIP_AND_FINISH();
4105 IEM_MC_END();
4106 }
4107 }
4108 else
4109 {
4110 /**
4111 * @opdone
4112 * @opcode 0x6e
4113 * @opcodesub rex.w=0
4114 * @oppfx 0x66
4115 * @opcpuid avx
4116 * @opgroup og_avx_simdint_datamov
4117 * @opxcpttype 5
4118 * @opfunction iemOp_vmovd_q_Vy_Ey
4119 * @optest op1=1 op2=2 -> op1=2
4120 * @optest op1=0 op2=-42 -> op1=-42
4121 */
4122 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4123 if (IEM_IS_MODRM_REG_MODE(bRm))
4124 {
4125 /* XMM, greg32 */
4126 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4127 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4128 IEM_MC_LOCAL(uint32_t, u32Tmp);
4129
4130 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4131 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4132
4133 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4134 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4135
4136 IEM_MC_ADVANCE_RIP_AND_FINISH();
4137 IEM_MC_END();
4138 }
4139 else
4140 {
4141 /* XMM, [mem32] */
4142 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4144 IEM_MC_LOCAL(uint32_t, u32Tmp);
4145
4146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4147 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4148 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4149 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4150
4151 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4152 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4153
4154 IEM_MC_ADVANCE_RIP_AND_FINISH();
4155 IEM_MC_END();
4156 }
4157 }
4158}
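
/*
 * Both paths above zero-extend the result through the whole vector register
 * (VLMAX).  Sketch of the 64-bit store assuming a 256-bit VLMAX, hypothetical
 * name:
 */
static void sketchStoreYregU64ZxVlmax(RTUINT256U *pYReg, uint64_t u64Value)
{
    pYReg->au64[0] = u64Value;
    pYReg->au64[1] = 0; /* bits 127:64 cleared */
    pYReg->au64[2] = 0; /* bits 255:128 cleared */
    pYReg->au64[3] = 0;
}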
4159
4160
4161/* Opcode VEX.F3.0F 0x6e - invalid */
4162
4163
4164/* Opcode VEX.0F 0x6f - invalid */
4165
4166/**
4167 * @opcode 0x6f
4168 * @oppfx 0x66
4169 * @opcpuid avx
4170 * @opgroup og_avx_simdint_datamove
4171 * @opxcpttype 1
4172 * @optest op1=1 op2=2 -> op1=2
4173 * @optest op1=0 op2=-42 -> op1=-42
4174 */
4175FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
4176{
4177 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4178 Assert(pVCpu->iem.s.uVexLength <= 1);
4179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4180 if (IEM_IS_MODRM_REG_MODE(bRm))
4181 {
4182 /*
4183 * Register, register.
4184 */
4185 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4186 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4187
4188 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4189 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4190 if (pVCpu->iem.s.uVexLength == 0)
4191 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4192 IEM_GET_MODRM_RM(pVCpu, bRm));
4193 else
4194 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4195 IEM_GET_MODRM_RM(pVCpu, bRm));
4196 IEM_MC_ADVANCE_RIP_AND_FINISH();
4197 IEM_MC_END();
4198 }
4199 else if (pVCpu->iem.s.uVexLength == 0)
4200 {
4201 /*
4202 * Register, memory128.
4203 */
4204 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4205 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4207
4208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4209 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4210 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4211 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4212
4213 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4214 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4215
4216 IEM_MC_ADVANCE_RIP_AND_FINISH();
4217 IEM_MC_END();
4218 }
4219 else
4220 {
4221 /*
4222 * Register, memory256.
4223 */
4224 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4225 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4227
4228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4229 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4232
4233 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4234 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4235
4236 IEM_MC_ADVANCE_RIP_AND_FINISH();
4237 IEM_MC_END();
4238 }
4239}
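
/*
 * Unlike vmovdqu below, vmovdqa requires a naturally aligned memory operand
 * (16 or 32 bytes, #GP(0) otherwise); that is what the _ALIGN_SSE/_ALIGN_AVX
 * fetchers enforce versus the _NO_AC variants.  Alignment predicate sketch,
 * hypothetical name:
 */
static bool sketchIsDqaAligned(RTGCPTR GCPtrMem, uint32_t cbOperand /* 16 or 32 */)
{
    return (GCPtrMem & (cbOperand - 1)) == 0;
}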
4240
4241/**
4242 * @opcode 0x6f
4243 * @oppfx 0xf3
4244 * @opcpuid avx
4245 * @opgroup og_avx_simdint_datamove
4246 * @opxcpttype 4UA
4247 * @optest op1=1 op2=2 -> op1=2
4248 * @optest op1=0 op2=-42 -> op1=-42
4249 */
4250FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
4251{
4252 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4253 Assert(pVCpu->iem.s.uVexLength <= 1);
4254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4255 if (IEM_IS_MODRM_REG_MODE(bRm))
4256 {
4257 /*
4258 * Register, register.
4259 */
4260 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4261 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4262
4263 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4264 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4265 if (pVCpu->iem.s.uVexLength == 0)
4266 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4267 IEM_GET_MODRM_RM(pVCpu, bRm));
4268 else
4269 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4270 IEM_GET_MODRM_RM(pVCpu, bRm));
4271 IEM_MC_ADVANCE_RIP_AND_FINISH();
4272 IEM_MC_END();
4273 }
4274 else if (pVCpu->iem.s.uVexLength == 0)
4275 {
4276 /*
4277 * Register, memory128.
4278 */
4279 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4280 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4282
4283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4284 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4285 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4286 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4287
4288 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4289 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4290
4291 IEM_MC_ADVANCE_RIP_AND_FINISH();
4292 IEM_MC_END();
4293 }
4294 else
4295 {
4296 /*
4297 * Register, memory256.
4298 */
4299 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4300 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4302
4303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4305 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4306 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4307
4308 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4309 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4310
4311 IEM_MC_ADVANCE_RIP_AND_FINISH();
4312 IEM_MC_END();
4313 }
4314}
4315
4316
4317/* Opcode VEX.0F 0x70 - invalid */
4318
4319
4320/**
4321 * Common worker for AVX/AVX2 instructions on the forms:
4322 * - vpxxx xmm0, xmm2/mem128, imm8
4323 * - vpxxx ymm0, ymm2/mem256, imm8
4324 *
4325 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4326 */
4327FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4328{
4329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4330 if (IEM_IS_MODRM_REG_MODE(bRm))
4331 {
4332 /*
4333 * Register, register.
4334 */
4335 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4336 if (pVCpu->iem.s.uVexLength)
4337 {
4338 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4339 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4340 IEM_MC_LOCAL(RTUINT256U, uDst);
4341 IEM_MC_LOCAL(RTUINT256U, uSrc);
4342 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4343 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4344 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4345 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4346 IEM_MC_PREPARE_AVX_USAGE();
4347 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4348 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4349 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4350 IEM_MC_ADVANCE_RIP_AND_FINISH();
4351 IEM_MC_END();
4352 }
4353 else
4354 {
4355 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4356 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4357 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4358 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4359 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4360 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4361 IEM_MC_PREPARE_AVX_USAGE();
4362 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4363 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4364 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4365 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4366 IEM_MC_ADVANCE_RIP_AND_FINISH();
4367 IEM_MC_END();
4368 }
4369 }
4370 else
4371 {
4372 /*
4373 * Register, memory.
4374 */
4375 if (pVCpu->iem.s.uVexLength)
4376 {
4377 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4378 IEM_MC_LOCAL(RTUINT256U, uDst);
4379 IEM_MC_LOCAL(RTUINT256U, uSrc);
4380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4381 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4382 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4383
4384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4385 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4386 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4387 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4388 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4389 IEM_MC_PREPARE_AVX_USAGE();
4390
4391 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4392 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4393 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4394
4395 IEM_MC_ADVANCE_RIP_AND_FINISH();
4396 IEM_MC_END();
4397 }
4398 else
4399 {
4400 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4401 IEM_MC_LOCAL(RTUINT128U, uSrc);
4402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4403 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4404 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4405
4406 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4407 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4408 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4409 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4410 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4411 IEM_MC_PREPARE_AVX_USAGE();
4412
4413 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4414 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4415 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4416 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4417
4418 IEM_MC_ADVANCE_RIP_AND_FINISH();
4419 IEM_MC_END();
4420 }
4421 }
4422}
4423
4424
4425/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
4426FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
4427{
4428 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4429 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
4430 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
4432}
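
/*
 * For reference, the dword shuffle vpshufd computes, per 128-bit lane:
 * dst[i] = src[(bImm >> (i * 2)) & 3].  Minimal sketch, hypothetical name:
 */
static void sketchPshufdU128(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U const uSrc = *puSrc; /* copy first so puDst may alias puSrc */
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = uSrc.au32[(bImm >> (i * 2)) & 3];
}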
4433
4434
4435/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
4436FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
4437{
4438 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4439 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
4440 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
4442}
4443
4444
4445/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
4446FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
4447{
4448 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4449 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
4450 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
4451}
4452
4453
4454/**
4455 * Common worker(s) for AVX/AVX2 instructions on the forms:
4456 * - vpxxx xmm0, xmm2, imm8
4457 * - vpxxx ymm0, ymm2, imm8
4458 *
4459 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4460 */
4461FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
4462{
4463 if (IEM_IS_MODRM_REG_MODE(bRm))
4464 {
4465 /*
4466 * Register, register.
4467 */
4468 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4469 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4470 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
4471 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4472 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4473 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4474 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4475 IEM_MC_PREPARE_AVX_USAGE();
4476 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4477 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4478 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4479 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
4480 IEM_MC_ADVANCE_RIP_AND_FINISH();
4481 IEM_MC_END();
4482 }
4483 /* No memory operand. */
4484 else
4485 IEMOP_RAISE_INVALID_OPCODE_RET();
4486}
4487
4488FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4489{
4490 if (IEM_IS_MODRM_REG_MODE(bRm))
4491 {
4492 /*
4493 * Register, register.
4494 */
4495 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4496 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4497 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
4498 IEM_MC_LOCAL(RTUINT256U, uDst);
4499 IEM_MC_LOCAL(RTUINT256U, uSrc);
4500 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4501 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4502 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4503 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4504 IEM_MC_PREPARE_AVX_USAGE();
4505 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4506 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4507 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
4508 IEM_MC_ADVANCE_RIP_AND_FINISH();
4509 IEM_MC_END();
4510 }
4511 /* No memory operand. */
4512 else
4513 IEMOP_RAISE_INVALID_OPCODE_RET();
4514}
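
/*
 * These workers feed the VEX group 12-14 immediate shifts below.  As an
 * example of the element semantics, psrlw shifts each 16-bit lane right
 * logically and zeroes the lane when the count exceeds 15.  Sketch,
 * hypothetical name:
 */
static void sketchPsrlwImmU128(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    for (unsigned i = 0; i < RT_ELEMENTS(puDst->au16); i++)
        puDst->au16[i] = bImm <= 15 ? puSrc->au16[i] >> bImm : 0;
}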
4515
4516
4517/* Opcode VEX.0F 0x71 11/2 - invalid. */
4518/** Opcode VEX.66.0F 0x71 11/2. */
4519FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
4520{
4521 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4522 if (pVCpu->iem.s.uVexLength)
4523 {
4524 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4525 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
4526 }
4527 else
4528 {
4529 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4530 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
4531 }
4532}
4533
4534
4535/* Opcode VEX.0F 0x71 11/4 - invalid */
4536/** Opcode VEX.66.0F 0x71 11/4. */
4537FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
4538{
4539 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4540 if (pVCpu->iem.s.uVexLength)
4541 {
4542 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4543 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
4544 }
4545 else
4546 {
4547 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4548 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
4549 }
4550}
4551
4552/* Opcode VEX.0F 0x71 11/6 - invalid */
4553
4554/** Opcode VEX.66.0F 0x71 11/6. */
4555FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
4556{
4557 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4558 if (pVCpu->iem.s.uVexLength)
4559 {
4560 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4561 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
4562 }
4563 else
4564 {
4565 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4566 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
4567 }
4568}
4569
4570
4571/**
4572 * VEX Group 12 jump table for register variant.
4573 */
4574IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
4575{
4576 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4577 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4578 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4579 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4580 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4581 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4582 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4583 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4584};
4585AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
4586
4587
4588/** Opcode VEX.0F 0x71. */
4589FNIEMOP_DEF(iemOp_VGrp12)
4590{
4591 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4592 if (IEM_IS_MODRM_REG_MODE(bRm))
4593 /* register, register */
4594 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4595 + pVCpu->iem.s.idxPrefix], bRm);
4596 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4597}
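
/*
 * The group jump tables are laid out as four prefix columns (none, 0x66,
 * 0xF3, 0xF2) per /r row, hence the 'reg * 4 + prefix' lookup above.  Index
 * computation sketch, assuming idxPrefix uses that column encoding:
 */
static unsigned sketchVexGroupTableIndex(uint8_t bRm, uint8_t idxPrefix)
{
    return ((bRm >> 3) & 7) * 4 + idxPrefix; /* e.g. VEX.66.0F 71 /2 -> 2*4 + 1 = 9 (vpsrlw) */
}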
4598
4599
4600/* Opcode VEX.0F 0x72 11/2 - invalid. */
4601/** Opcode VEX.66.0F 0x72 11/2. */
4602FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
4603{
4604 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4605 if (pVCpu->iem.s.uVexLength)
4606 {
4607 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4608 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
4609 }
4610 else
4611 {
4612 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4613 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
4614 }
4615}
4616
4617
4618/* Opcode VEX.0F 0x72 11/4 - invalid. */
4619/** Opcode VEX.66.0F 0x72 11/4. */
4620FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
4621{
4622 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4623 if (pVCpu->iem.s.uVexLength)
4624 {
4625 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4626 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
4627 }
4628 else
4629 {
4630 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4631 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
4632 }
4633}
4634
4635/* Opcode VEX.0F 0x72 11/6 - invalid. */
4636
4637/** Opcode VEX.66.0F 0x72 11/6. */
4638FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
4639{
4640 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4641 if (pVCpu->iem.s.uVexLength)
4642 {
4643 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4644 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
4645 }
4646 else
4647 {
4648 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4649 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
4650 }
4651}
4652
4653
4654/**
4655 * VEX Group 13 jump table for register variant.
4656 */
4657IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
4658{
4659 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4660 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4661 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4662 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4663 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4664 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4665 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4666 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4667};
4668AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
4669
4670/** Opcode VEX.0F 0x72. */
4671FNIEMOP_DEF(iemOp_VGrp13)
4672{
4673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4674 if (IEM_IS_MODRM_REG_MODE(bRm))
4675 /* register, register */
4676 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4677 + pVCpu->iem.s.idxPrefix], bRm);
4678 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4679}
4680
4681
4682/* Opcode VEX.0F 0x73 11/2 - invalid. */
4683/** Opcode VEX.66.0F 0x73 11/2. */
4684FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
4685{
4686 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4687 if (pVCpu->iem.s.uVexLength)
4688 {
4689 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4690 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
4691 }
4692 else
4693 {
4694 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4695 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
4696 }
4697}
4698
4699
4700/** Opcode VEX.66.0F 0x73 11/3. */
4701FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
4702{
4703 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4704 if (pVCpu->iem.s.uVexLength)
4705 {
4706 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4707 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
4708 }
4709 else
4710 {
4711 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4712 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
4713 }
4714}
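
/*
 * vpsrldq (and vpslldq in 11/7 below) shift whole 128-bit lanes by bytes
 * rather than bits; counts above 15 clear the lane.  Byte-shift sketch,
 * hypothetical name:
 */
static void sketchPsrldqU128(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t cbShift)
{
    RTUINT128U const uSrc = *puSrc; /* copy first so puDst may alias puSrc */
    for (unsigned i = 0; i < 16; i++)
        puDst->au8[i] = i + cbShift < 16 ? uSrc.au8[i + cbShift] : 0;
}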
4715
4716/* Opcode VEX.0F 0x73 11/6 - invalid. */
4717
4718/** Opcode VEX.66.0F 0x73 11/6. */
4719FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
4720{
4721 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4722 if (pVCpu->iem.s.uVexLength)
4723 {
4724 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4725 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
4726 }
4727 else
4728 {
4729 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4730 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
4731 }
4732}
4733
4734/** Opcode VEX.66.0F 0x73 11/7. */
4735FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
4736{
4737 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4738 if (pVCpu->iem.s.uVexLength)
4739 {
4740 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4741 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
4742 }
4743 else
4744 {
4745 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4746 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
4747 }
4748}
4749
4750/* Opcode VEX.0F 0x73 11/7 - invalid. */
4751
4752/**
4753 * VEX Group 14 jump table for register variant.
4754 */
4755IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
4756{
4757 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4758 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4759 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4760 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4761 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4762 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4763 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4764 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4765};
4766AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
4767
4768
4769/** Opcode VEX.0F 0x73. */
4770FNIEMOP_DEF(iemOp_VGrp14)
4771{
4772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4773 if (IEM_IS_MODRM_REG_MODE(bRm))
4774 /* register, register */
4775 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4776 + pVCpu->iem.s.idxPrefix], bRm);
4777 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4778}
4779
4780
4781/* Opcode VEX.0F 0x74 - invalid */
4782
4783
4784/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
4785FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
4786{
4787 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4788 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
4789 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4790}
4791
4792/* Opcode VEX.F3.0F 0x74 - invalid */
4793/* Opcode VEX.F2.0F 0x74 - invalid */
4794
4795
4796/* Opcode VEX.0F 0x75 - invalid */
4797
4798
4799/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
4800FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
4801{
4802 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4803 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
4804 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4805}
4806
4807
4808/* Opcode VEX.F3.0F 0x75 - invalid */
4809/* Opcode VEX.F2.0F 0x75 - invalid */
4810
4811
4812/* Opcode VEX.0F 0x76 - invalid */
4813
4814
4815/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
4816FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
4817{
4818 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4819 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
4820 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4821}
4822
4823
4824/* Opcode VEX.F3.0F 0x76 - invalid */
4825/* Opcode VEX.F2.0F 0x76 - invalid */
4826
4827
4828/** Opcode VEX.0F 0x77 - vzeroupper (VEX.L=0) / vzeroall (VEX.L=1) */
4829FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
4830{
4831 Assert(pVCpu->iem.s.uVexLength <= 1);
4832 if (pVCpu->iem.s.uVexLength == 0)
4833 {
4834 /*
4835 * 128-bit: vzeroupper
4836 */
4837 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
4838 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4839
4840 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4841 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4842 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4843
4844 IEM_MC_CLEAR_YREG_128_UP(0);
4845 IEM_MC_CLEAR_YREG_128_UP(1);
4846 IEM_MC_CLEAR_YREG_128_UP(2);
4847 IEM_MC_CLEAR_YREG_128_UP(3);
4848 IEM_MC_CLEAR_YREG_128_UP(4);
4849 IEM_MC_CLEAR_YREG_128_UP(5);
4850 IEM_MC_CLEAR_YREG_128_UP(6);
4851 IEM_MC_CLEAR_YREG_128_UP(7);
4852
4853 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4854 {
4855 IEM_MC_CLEAR_YREG_128_UP( 8);
4856 IEM_MC_CLEAR_YREG_128_UP( 9);
4857 IEM_MC_CLEAR_YREG_128_UP(10);
4858 IEM_MC_CLEAR_YREG_128_UP(11);
4859 IEM_MC_CLEAR_YREG_128_UP(12);
4860 IEM_MC_CLEAR_YREG_128_UP(13);
4861 IEM_MC_CLEAR_YREG_128_UP(14);
4862 IEM_MC_CLEAR_YREG_128_UP(15);
4863 }
4864
4865 IEM_MC_ADVANCE_RIP_AND_FINISH();
4866 IEM_MC_END();
4867 }
4868 else
4869 {
4870 /*
4871 * 256-bit: vzeroall
4872 */
4873 IEMOP_MNEMONIC(vzeroall, "vzeroall");
4874 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4875
4876 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4877 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4878 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4879
4880 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
4881 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
4882 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
4883 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
4884 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
4885 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4886 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4887 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4888 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4889
4890 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4891 {
4892 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4893 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
4894 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
4895 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
4896 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
4897 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
4898 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
4899 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
4900 }
4901
4902 IEM_MC_ADVANCE_RIP_AND_FINISH();
4903 IEM_MC_END();
4904 }
4905}
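
/* Rough sketch of the architectural semantics implemented above (comment
   only, not emulator code): with cRegs = 16 in 64-bit mode and 8 otherwise,
   vzeroupper clears ymm[i] bits 255:128 for i < cRegs, while vzeroall
   clears all 256 bits.  The vzeroall path reuses the 32-bit zero store with
   VLMAX zero-extension, which wipes the entire register in one go. */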
4906
4907
4908/* Opcode VEX.66.0F 0x77 - invalid */
4909/* Opcode VEX.F3.0F 0x77 - invalid */
4910/* Opcode VEX.F2.0F 0x77 - invalid */
4911
4912/* Opcode VEX.0F 0x78 - invalid */
4913/* Opcode VEX.66.0F 0x78 - invalid */
4914/* Opcode VEX.F3.0F 0x78 - invalid */
4915/* Opcode VEX.F2.0F 0x78 - invalid */
4916
4917/* Opcode VEX.0F 0x79 - invalid */
4918/* Opcode VEX.66.0F 0x79 - invalid */
4919/* Opcode VEX.F3.0F 0x79 - invalid */
4920/* Opcode VEX.F2.0F 0x79 - invalid */
4921
4922/* Opcode VEX.0F 0x7a - invalid */
4923/* Opcode VEX.66.0F 0x7a - invalid */
4924/* Opcode VEX.F3.0F 0x7a - invalid */
4925/* Opcode VEX.F2.0F 0x7a - invalid */
4926
4927/* Opcode VEX.0F 0x7b - invalid */
4928/* Opcode VEX.66.0F 0x7b - invalid */
4929/* Opcode VEX.F3.0F 0x7b - invalid */
4930/* Opcode VEX.F2.0F 0x7b - invalid */
4931
4932/* Opcode VEX.0F 0x7c - invalid */
4933
4934
4935/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4936FNIEMOP_DEF(iemOp_vhaddpd_Vpd_Hpd_Wpd)
4937{
4938 IEMOP_MNEMONIC3(VEX_RVM, VHADDPD, vhaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4939 IEMOPMEDIAF3_INIT_VARS( vhaddpd);
4940 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4941}
4942
4943
4944/* Opcode VEX.F3.0F 0x7c - invalid */
4945
4946
4947/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4948FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
4949{
4950 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4951 IEMOPMEDIAF3_INIT_VARS( vhaddps);
4952 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4953}
4954
4955
4956/* Opcode VEX.0F 0x7d - invalid */
4957
4958
4959/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4960FNIEMOP_DEF(iemOp_vhsubpd_Vpd_Hpd_Wpd)
4961{
4962 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPD, vhsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4963 IEMOPMEDIAF3_INIT_VARS( vhsubpd);
4964 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4965}
4966
4967
4968/* Opcode VEX.F3.0F 0x7d - invalid */
4969
4970
4971/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4972FNIEMOP_DEF(iemOp_vhsubps_Vps_Hps_Wps)
4973{
4974 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPS, vhsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4975 IEMOPMEDIAF3_INIT_VARS( vhsubps);
4976 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4977}
4978
4979
4980/* Opcode VEX.0F 0x7e - invalid */
4981
4982FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4983{
4984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4985 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4986 {
4987 /**
4988 * @opcode 0x7e
4989 * @opcodesub rex.w=1
4990 * @oppfx 0x66
4991 * @opcpuid avx
4992 * @opgroup og_avx_simdint_datamov
4993 * @opxcpttype 5
4994 * @optest 64-bit / op1=1 op2=2 -> op1=2
4995 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4996 */
4997 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4998 if (IEM_IS_MODRM_REG_MODE(bRm))
4999 {
5000 /* greg64, XMM */
5001 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5002 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5003 IEM_MC_LOCAL(uint64_t, u64Tmp);
5004
5005 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5006 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5007
5008 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5009 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5010
5011 IEM_MC_ADVANCE_RIP_AND_FINISH();
5012 IEM_MC_END();
5013 }
5014 else
5015 {
5016 /* [mem64], XMM */
5017 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5019 IEM_MC_LOCAL(uint64_t, u64Tmp);
5020
5021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5022 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5023 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5024 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5025
5026 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5027 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5028
5029 IEM_MC_ADVANCE_RIP_AND_FINISH();
5030 IEM_MC_END();
5031 }
5032 }
5033 else
5034 {
5035 /**
5036 * @opdone
5037 * @opcode 0x7e
5038 * @opcodesub rex.w=0
5039 * @oppfx 0x66
5040 * @opcpuid avx
5041 * @opgroup og_avx_simdint_datamov
5042 * @opxcpttype 5
5043 * @opfunction iemOp_vmovd_q_Ey_Vy
5044 * @optest op1=1 op2=2 -> op1=2
5045 * @optest op1=0 op2=-42 -> op1=-42
5046 */
5047 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
5048 if (IEM_IS_MODRM_REG_MODE(bRm))
5049 {
5050 /* greg32, XMM */
5051 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5052 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5053 IEM_MC_LOCAL(uint32_t, u32Tmp);
5054
5055 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5056 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5057
5058 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5059 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5060
5061 IEM_MC_ADVANCE_RIP_AND_FINISH();
5062 IEM_MC_END();
5063 }
5064 else
5065 {
5066 /* [mem32], XMM */
5067 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5069 IEM_MC_LOCAL(uint32_t, u32Tmp);
5070
5071 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5072 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5073 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5074 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5075
5076 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5077 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5078
5079 IEM_MC_ADVANCE_RIP_AND_FINISH();
5080 IEM_MC_END();
5081 }
5082 }
5083}
5084
5085
5086/**
5087 * @opcode 0x7e
5088 * @oppfx 0xf3
5089 * @opcpuid avx
5090 * @opgroup og_avx_pcksclr_datamove
5091 * @opxcpttype none
5092 * @optest op1=1 op2=2 -> op1=2
5093 * @optest op1=0 op2=-42 -> op1=-42
5094 */
5095FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
5096{
5097 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5099 if (IEM_IS_MODRM_REG_MODE(bRm))
5100 {
5101 /*
5102 * Register, register.
5103 */
5104 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5105 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5106
5107 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5108 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5109
5110 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
5111 IEM_GET_MODRM_RM(pVCpu, bRm));
5112 IEM_MC_ADVANCE_RIP_AND_FINISH();
5113 IEM_MC_END();
5114 }
5115 else
5116 {
5117 /*
5118 * Memory, register.
5119 */
5120 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5121 IEM_MC_LOCAL(uint64_t, uSrc);
5122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5123
5124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5125 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5126 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5127 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5128
5129 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5130 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5131
5132 IEM_MC_ADVANCE_RIP_AND_FINISH();
5133 IEM_MC_END();
5134 }
5135}
5136
5137/* Opcode VEX.F2.0F 0x7e - invalid */
5138
5139
5140/* Opcode VEX.0F 0x7f - invalid */
5141
5142/**
5143 * @opcode 0x7f
5144 * @oppfx 0x66
5145 * @opcpuid avx
5146 * @opgroup og_avx_simdint_datamove
5147 * @opxcpttype 1
5148 * @optest op1=1 op2=2 -> op1=2
5149 * @optest op1=0 op2=-42 -> op1=-42
5150 */
5151FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
5152{
5153 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5154 Assert(pVCpu->iem.s.uVexLength <= 1);
5155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5156 if (IEM_IS_MODRM_REG_MODE(bRm))
5157 {
5158 /*
5159 * Register, register.
5160 */
5161 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5162 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5163
5164 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5165 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5166 if (pVCpu->iem.s.uVexLength == 0)
5167 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5168 IEM_GET_MODRM_REG(pVCpu, bRm));
5169 else
5170 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5171 IEM_GET_MODRM_REG(pVCpu, bRm));
5172 IEM_MC_ADVANCE_RIP_AND_FINISH();
5173 IEM_MC_END();
5174 }
5175 else if (pVCpu->iem.s.uVexLength == 0)
5176 {
5177 /*
5178 * Register, memory128.
5179 */
5180 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5181 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5183
5184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5185 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5186 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5187 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5188
5189 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5190 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5191
5192 IEM_MC_ADVANCE_RIP_AND_FINISH();
5193 IEM_MC_END();
5194 }
5195 else
5196 {
5197 /*
5198 * Register, memory256.
5199 */
5200 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5201 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5203
5204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5205 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5206 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5207 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5208
5209 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5210 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5211
5212 IEM_MC_ADVANCE_RIP_AND_FINISH();
5213 IEM_MC_END();
5214 }
5215}
5216
5217
5218/**
5219 * @opcode 0x7f
5220 * @oppfx 0xf3
5221 * @opcpuid avx
5222 * @opgroup og_avx_simdint_datamove
5223 * @opxcpttype 4UA
5224 * @optest op1=1 op2=2 -> op1=2
5225 * @optest op1=0 op2=-42 -> op1=-42
5226 */
5227FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
5228{
5229 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5230 Assert(pVCpu->iem.s.uVexLength <= 1);
5231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5232 if (IEM_IS_MODRM_REG_MODE(bRm))
5233 {
5234 /*
5235 * Register, register.
5236 */
5237 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5238 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5239
5240 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5241 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5242 if (pVCpu->iem.s.uVexLength == 0)
5243 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5244 IEM_GET_MODRM_REG(pVCpu, bRm));
5245 else
5246 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5247 IEM_GET_MODRM_REG(pVCpu, bRm));
5248 IEM_MC_ADVANCE_RIP_AND_FINISH();
5249 IEM_MC_END();
5250 }
5251 else if (pVCpu->iem.s.uVexLength == 0)
5252 {
5253 /*
5254 * Register, memory128.
5255 */
5256 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5257 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5259
5260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5261 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5262 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5263 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5264
5265 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5266 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5267
5268 IEM_MC_ADVANCE_RIP_AND_FINISH();
5269 IEM_MC_END();
5270 }
5271 else
5272 {
5273 /*
5274 * Register, memory256.
5275 */
5276 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5277 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5279
5280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5281 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5282 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5283 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5284
5285 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5286 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5287
5288 IEM_MC_ADVANCE_RIP_AND_FINISH();
5289 IEM_MC_END();
5290 }
5291}
5292
5293/* Opcode VEX.F2.0F 0x7f - invalid */
5294
5295
5296/* Opcode VEX.0F 0x80 - invalid */
5297/* Opcode VEX.0F 0x81 - invalid */
5298/* Opcode VEX.0F 0x82 - invalid */
5299/* Opcode VEX.0F 0x83 - invalid */
5300/* Opcode VEX.0F 0x84 - invalid */
5301/* Opcode VEX.0F 0x85 - invalid */
5302/* Opcode VEX.0F 0x86 - invalid */
5303/* Opcode VEX.0F 0x87 - invalid */
5304/* Opcode VEX.0F 0x88 - invalid */
5305/* Opcode VEX.0F 0x89 - invalid */
5306/* Opcode VEX.0F 0x8a - invalid */
5307/* Opcode VEX.0F 0x8b - invalid */
5308/* Opcode VEX.0F 0x8c - invalid */
5309/* Opcode VEX.0F 0x8d - invalid */
5310/* Opcode VEX.0F 0x8e - invalid */
5311/* Opcode VEX.0F 0x8f - invalid */
5312/* Opcode VEX.0F 0x90 - invalid */
5313/* Opcode VEX.0F 0x91 - invalid */
5314/* Opcode VEX.0F 0x92 - invalid */
5315/* Opcode VEX.0F 0x93 - invalid */
5316/* Opcode VEX.0F 0x94 - invalid */
5317/* Opcode VEX.0F 0x95 - invalid */
5318/* Opcode VEX.0F 0x96 - invalid */
5319/* Opcode VEX.0F 0x97 - invalid */
5320/* Opcode VEX.0F 0x98 - invalid */
5321/* Opcode VEX.0F 0x99 - invalid */
5322/* Opcode VEX.0F 0x9a - invalid */
5323/* Opcode VEX.0F 0x9b - invalid */
5324/* Opcode VEX.0F 0x9c - invalid */
5325/* Opcode VEX.0F 0x9d - invalid */
5326/* Opcode VEX.0F 0x9e - invalid */
5327/* Opcode VEX.0F 0x9f - invalid */
5328/* Opcode VEX.0F 0xa0 - invalid */
5329/* Opcode VEX.0F 0xa1 - invalid */
5330/* Opcode VEX.0F 0xa2 - invalid */
5331/* Opcode VEX.0F 0xa3 - invalid */
5332/* Opcode VEX.0F 0xa4 - invalid */
5333/* Opcode VEX.0F 0xa5 - invalid */
5334/* Opcode VEX.0F 0xa6 - invalid */
5335/* Opcode VEX.0F 0xa7 - invalid */
5336/* Opcode VEX.0F 0xa8 - invalid */
5337/* Opcode VEX.0F 0xa9 - invalid */
5338/* Opcode VEX.0F 0xaa - invalid */
5339/* Opcode VEX.0F 0xab - invalid */
5340/* Opcode VEX.0F 0xac - invalid */
5341/* Opcode VEX.0F 0xad - invalid */
5342
5343
5344/* Opcode VEX.0F 0xae mem/0 - invalid. */
5345/* Opcode VEX.0F 0xae mem/1 - invalid. */
5346
5347/**
5348 * @ opmaps grp15
5349 * @ opcode !11/2
5350 * @ oppfx none
5351 * @ opcpuid sse
5352 * @ opgroup og_sse_mxcsrsm
5353 * @ opxcpttype 5
5354 * @ optest op1=0 -> mxcsr=0
5355 * @ optest op1=0x2083 -> mxcsr=0x2083
5356 * @ optest op1=0xfffffffe -> value.xcpt=0xd
5357 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5358 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5359 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5360 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5361 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5362 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5363 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5364 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5365 */
5366FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
5367//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
5368//{
5369// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5370// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5371// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5372// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5373// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5374// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5375// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5376// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5377// IEM_MC_END();
5378// return VINF_SUCCESS;
5379//}
5380
5381
5382/**
5383 * @opmaps vexgrp15
5384 * @opcode !11/3
5385 * @oppfx none
5386 * @opcpuid avx
5387 * @opgroup og_avx_mxcsrsm
5388 * @opxcpttype 5
5389 * @optest mxcsr=0 -> op1=0
5390 * @optest mxcsr=0x2083 -> op1=0x2083
5391 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5392 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
5393 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5394 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5395 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
5396 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
5397 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
5398 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
5399 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5400 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
5401 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5402 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
5403 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5404 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
5405 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5406 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
5407 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
5408 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
5409 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
5410 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
5411 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
5412 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
5413 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
5414 * -> value.xcpt=0x6
5415 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
5416 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
5417 * APMv4 rev 3.17 page 509.
5418 * @todo Test this instruction on AMD Ryzen.
5419 */
5420FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
5421{
5422 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5423 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5424 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5426 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5427 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5428 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5429 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
5430 IEM_MC_END();
5431}
5432
5433/* Opcode VEX.0F 0xae mem/4 - invalid. */
5434/* Opcode VEX.0F 0xae mem/5 - invalid. */
5435/* Opcode VEX.0F 0xae mem/6 - invalid. */
5436/* Opcode VEX.0F 0xae mem/7 - invalid. */
5437
5438/* Opcode VEX.0F 0xae 11b/0 - invalid. */
5439/* Opcode VEX.0F 0xae 11b/1 - invalid. */
5440/* Opcode VEX.0F 0xae 11b/2 - invalid. */
5441/* Opcode VEX.0F 0xae 11b/3 - invalid. */
5442/* Opcode VEX.0F 0xae 11b/4 - invalid. */
5443/* Opcode VEX.0F 0xae 11b/5 - invalid. */
5444/* Opcode VEX.0F 0xae 11b/6 - invalid. */
5445/* Opcode VEX.0F 0xae 11b/7 - invalid. */
5446
5447/**
5448 * Vex group 15 jump table for memory variant.
5449 */
5450IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
5451{ /* pfx: none, 066h, 0f3h, 0f2h */
5452 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5453 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5454 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5455 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5456 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5457 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5458 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5459 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5460};
5461AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
5462
5463
5464/** Opcode VEX.0F 0xae (VEX group 15). */
5465FNIEMOP_DEF(iemOp_VGrp15)
5466{
5467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5468 if (IEM_IS_MODRM_REG_MODE(bRm))
5469 /* register, register */
5470 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
5471
5472 /* memory, register */
5473 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5474 + pVCpu->iem.s.idxPrefix], bRm);
5475}
5476
5477
5478/* Opcode VEX.0F 0xaf - invalid. */
5479
5480/* Opcode VEX.0F 0xb0 - invalid. */
5481/* Opcode VEX.0F 0xb1 - invalid. */
5482/* Opcode VEX.0F 0xb2 - invalid. */
5484/* Opcode VEX.0F 0xb3 - invalid. */
5485/* Opcode VEX.0F 0xb4 - invalid. */
5486/* Opcode VEX.0F 0xb5 - invalid. */
5487/* Opcode VEX.0F 0xb6 - invalid. */
5488/* Opcode VEX.0F 0xb7 - invalid. */
5489/* Opcode VEX.0F 0xb8 - invalid. */
5490/* Opcode VEX.0F 0xb9 - invalid. */
5491/* Opcode VEX.0F 0xba - invalid. */
5492/* Opcode VEX.0F 0xbb - invalid. */
5493/* Opcode VEX.0F 0xbc - invalid. */
5494/* Opcode VEX.0F 0xbd - invalid. */
5495/* Opcode VEX.0F 0xbe - invalid. */
5496/* Opcode VEX.0F 0xbf - invalid. */
5497
5498/* Opcode VEX.0F 0xc0 - invalid. */
5499/* Opcode VEX.66.0F 0xc0 - invalid. */
5500/* Opcode VEX.F3.0F 0xc0 - invalid. */
5501/* Opcode VEX.F2.0F 0xc0 - invalid. */
5502
5503/* Opcode VEX.0F 0xc1 - invalid. */
5504/* Opcode VEX.66.0F 0xc1 - invalid. */
5505/* Opcode VEX.F3.0F 0xc1 - invalid. */
5506/* Opcode VEX.F2.0F 0xc1 - invalid. */
5507
5508#define IEMOP_VCMPP_BODY(a_Instr) \
5509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5510 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5511 { \
5512 /* \
5513 * Register, Register. \
5514 */ \
5515 if (pVCpu->iem.s.uVexLength) \
5516 { \
5517 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5518 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5519 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5520 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5521 IEM_MC_PREPARE_AVX_USAGE(); \
5522 IEM_MC_LOCAL(X86YMMREG, uDst); \
5523 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5524 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5525 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5526 IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5527 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5528 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5529 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5530 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5531 puDst, puSrc, bImmArg); \
5532 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5533 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5534 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5535 IEM_MC_END(); \
5536 } \
5537 else \
5538 { \
5539 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5540 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5541 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5542 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5543 IEM_MC_PREPARE_AVX_USAGE(); \
5544 IEM_MC_LOCAL(X86XMMREG, uDst); \
5545 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5546 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5547 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5548 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5549 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5550 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5551 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5552 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5553 puDst, puSrc, bImmArg); \
5554 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5555 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5556 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5557 IEM_MC_END(); \
5558 } \
5559 } \
5560 else \
5561 { \
5562 /* \
5563 * Register, Memory. \
5564 */ \
5565 if (pVCpu->iem.s.uVexLength) \
5566 { \
5567 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5570 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5571 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5572 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5573 IEM_MC_PREPARE_AVX_USAGE(); \
5574 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5575 IEM_MC_LOCAL(X86YMMREG, uDst); \
5576 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5577 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5578 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5579 IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5580 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5581 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5582 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5583 puDst, puSrc, bImmArg); \
5584 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5585 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5586 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5587 IEM_MC_END(); \
5588 } \
5589 else \
5590 { \
5591 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5594 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5595 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5596 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5597 IEM_MC_PREPARE_AVX_USAGE(); \
5598 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5599 IEM_MC_LOCAL(X86XMMREG, uDst); \
5600 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5601 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5602 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5603 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5604 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5605 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5606 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5607 puDst, puSrc, bImmArg); \
5608 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5609 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5610 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5611 IEM_MC_END(); \
5612 } \
5613 } \
5614 (void)0
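
/* The trailing '(void)0' lets IEMOP_VCMPP_BODY be invoked as a statement,
   i.e. 'IEMOP_VCMPP_BODY(vcmpps);', with the semicolon required. */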
5615
5616
5617/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
5618FNIEMOP_DEF(iemOp_vcmpps_Vps_Hps_Wps_Ib)
5619{
5620 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPS, vcmpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5621 IEMOP_VCMPP_BODY(vcmpps);
5622}
5623
5624
5625/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
5626FNIEMOP_DEF(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib)
5627{
5628 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPD, vcmppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5629 IEMOP_VCMPP_BODY(vcmppd);
5630}
5631
5632
5633/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
5634FNIEMOP_DEF(iemOp_vcmpss_Vss_Hss_Wss_Ib)
5635{
5636 IEMOP_MNEMONIC4(VEX_RVMI, VCMPSS, vcmpss, Vss, Hss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5637
5638 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5639 if (IEM_IS_MODRM_REG_MODE(bRm))
5640 {
5641 /*
5642 * XMM32, XMM32.
5643 */
5644 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5645 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5646 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5647 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5648 IEM_MC_PREPARE_AVX_USAGE();
5649 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5650 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5651 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
5652 IEM_MC_LOCAL(X86XMMREG, uDst);
5653 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5654 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5655 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5656 puDst, puSrc, bImmArg);
5657 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5658 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5659
5660 IEM_MC_ADVANCE_RIP_AND_FINISH();
5661 IEM_MC_END();
5662 }
5663 else
5664 {
5665 /*
5666 * XMM32, [mem32].
5667 */
5668 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5669
5670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5672 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5673 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5674 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5675 IEM_MC_PREPARE_AVX_USAGE();
5676
5677 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5678 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5679 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
5680 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5681 IEM_MC_LOCAL(X86XMMREG, uDst);
5682 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5683 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5684 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5685 puDst, puSrc, bImmArg);
5686 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5687 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5688
5689 IEM_MC_ADVANCE_RIP_AND_FINISH();
5690 IEM_MC_END();
5691 }
5692}
5693
5694
5695/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
5696FNIEMOP_DEF(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib)
5697{
5698 IEMOP_MNEMONIC4(VEX_RVMI, VCMPSD, vcmpsd, Vsd, Hsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5699
5700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5701 if (IEM_IS_MODRM_REG_MODE(bRm))
5702 {
5703 /*
5704 * XMM64, XMM64.
5705 */
5706 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5707 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5708 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5709 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5710 IEM_MC_PREPARE_AVX_USAGE();
5711 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5712 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5713 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
5714 IEM_MC_LOCAL(X86XMMREG, uDst);
5715 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5716 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5717 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5718 puDst, puSrc, bImmArg);
5719 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5720 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5721
5722 IEM_MC_ADVANCE_RIP_AND_FINISH();
5723 IEM_MC_END();
5724 }
5725 else
5726 {
5727 /*
5728 * XMM64, [mem64].
5729 */
5730 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5731
5732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5734 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5735 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5736 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5737 IEM_MC_PREPARE_AVX_USAGE();
5738
5739 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5740 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5741 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
5742 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5743 IEM_MC_LOCAL(X86XMMREG, uDst);
5744 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5745 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5746 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5747 puDst, puSrc, bImmArg);
5748 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5749 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5750
5751 IEM_MC_ADVANCE_RIP_AND_FINISH();
5752 IEM_MC_END();
5753 }
5754}
5755
5756
5757/* Opcode VEX.0F 0xc3 - invalid */
5758/* Opcode VEX.66.0F 0xc3 - invalid */
5759/* Opcode VEX.F3.0F 0xc3 - invalid */
5760/* Opcode VEX.F2.0F 0xc3 - invalid */
5761
5762/* Opcode VEX.0F 0xc4 - invalid */
5763
5764
5765/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
5766FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
5767{
5768 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
5769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5770 if (IEM_IS_MODRM_REG_MODE(bRm))
5771 {
5772 /*
5773 * Register, register.
5774 */
5775 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5776 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5777 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5778 IEM_MC_LOCAL(uint16_t, uValue);
5779
5780 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5781 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5782 IEM_MC_PREPARE_AVX_USAGE();
5783
5784 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5785 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
5786 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5787 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5788 IEM_MC_ADVANCE_RIP_AND_FINISH();
5789 IEM_MC_END();
5790 }
5791 else
5792 {
5793 /*
5794 * Register, memory.
5795 */
5796 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5798 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5799 IEM_MC_LOCAL(uint16_t, uValue);
5800
5801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5802 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5803 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5804 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5805 IEM_MC_PREPARE_AVX_USAGE();
5806
5807 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5808 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5809 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5810 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
5811 IEM_MC_ADVANCE_RIP_AND_FINISH();
5812 IEM_MC_END();
5813 }
5814}
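
/* Note: the destination is first seeded with the whole 128-bit VVVV source
   and only then is the word selected by 'bImm & 7' overwritten, so the
   remaining seven words come from Hdq rather than the old destination;
   immediate bits 7:3 are ignored. */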
5815
5816
5817/* Opcode VEX.F3.0F 0xc4 - invalid */
5818/* Opcode VEX.F2.0F 0xc4 - invalid */
5819
5820/* Opcode VEX.0F 0xc5 - invalid */
5821
5822
5823/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
5824FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
5825{
5826 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
5827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5828 if (IEM_IS_MODRM_REG_MODE(bRm))
5829 {
5830 /*
5831 * greg32, XMM, imm8.
5832 */
5833 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5834 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5835 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5836 IEM_MC_LOCAL(uint16_t, uValue);
5837 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5838 IEM_MC_PREPARE_AVX_USAGE();
5839 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
5840 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
5841 IEM_MC_ADVANCE_RIP_AND_FINISH();
5842 IEM_MC_END();
5843 }
5844 /* No memory operand. */
5845 else
5846 IEMOP_RAISE_INVALID_OPCODE_RET();
5847}
5848
5849
5850/* Opcode VEX.F3.0F 0xc5 - invalid */
5851/* Opcode VEX.F2.0F 0xc5 - invalid */
5852
5853
5854#define VSHUFP_X(a_Instr) \
5855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5856 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5857 { \
5858 /* \
5859 * Register, register. \
5860 */ \
5861 if (pVCpu->iem.s.uVexLength) \
5862 { \
5863 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5864 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5865 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
5866 IEM_MC_LOCAL(RTUINT256U, uDst); \
5867 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
5868 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
5869 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
5870 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
5871 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
5872 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5873 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5874 IEM_MC_PREPARE_AVX_USAGE(); \
5875 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5876 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5877 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
5878 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5879 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5880 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5881 IEM_MC_END(); \
5882 } \
5883 else \
5884 { \
5885 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5886 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5887 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5888 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
5889 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
5890 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
5891 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5892 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5893 IEM_MC_PREPARE_AVX_USAGE(); \
5894 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5895 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5896 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5897 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
5898 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5899 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
5900 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5901 IEM_MC_END(); \
5902 } \
5903 } \
5904 else \
5905 { \
5906 /* \
5907 * Register, memory. \
5908 */ \
5909 if (pVCpu->iem.s.uVexLength) \
5910 { \
5911 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5912 IEM_MC_LOCAL(RTUINT256U, uDst); \
5913 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
5914 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
5915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5916 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
5917 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
5918 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
5919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5920 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5921 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5922 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
5923 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5924 IEM_MC_PREPARE_AVX_USAGE(); \
5925 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5926 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5927 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
5928 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5929 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5930 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5931 IEM_MC_END(); \
5932 } \
5933 else \
5934 { \
5935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5936 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
5937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5938 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
5939 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
5940 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
5941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5942 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5943 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
5944 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5945 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5946 IEM_MC_PREPARE_AVX_USAGE(); \
5947 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5948 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5949 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
5950 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
5951 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
5952 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
5953 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5954 IEM_MC_END(); \
5955 } \
5956 } \
5957 (void)0
5958
5959/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
5960FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
5961{
5962 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5963 VSHUFP_X(vshufps);
5964}
5965
5966
5967/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
5968FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
5969{
5970 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
5971 VSHUFP_X(vshufpd);
5972}
5973#undef VSHUFP_X
5974
5975
5976/* Opcode VEX.F3.0F 0xc6 - invalid */
5977/* Opcode VEX.F2.0F 0xc6 - invalid */
5978
5979/* Opcode VEX.0F 0xc7 - invalid */
5980/* Opcode VEX.66.0F 0xc7 - invalid */
5981/* Opcode VEX.F3.0F 0xc7 - invalid */
5982/* Opcode VEX.F2.0F 0xc7 - invalid */
5983
5984/* Opcode VEX.0F 0xc8 - invalid */
5985/* Opcode VEX.0F 0xc9 - invalid */
5986/* Opcode VEX.0F 0xca - invalid */
5987/* Opcode VEX.0F 0xcb - invalid */
5988/* Opcode VEX.0F 0xcc - invalid */
5989/* Opcode VEX.0F 0xcd - invalid */
5990/* Opcode VEX.0F 0xce - invalid */
5991/* Opcode VEX.0F 0xcf - invalid */
5992
5993
5994/* Opcode VEX.0F 0xd0 - invalid */
5995
5996
5997/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
5998FNIEMOP_DEF(iemOp_vaddsubpd_Vpd_Hpd_Wpd)
5999{
6000 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPD, vaddsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6001 IEMOPMEDIAF3_INIT_VARS( vaddsubpd);
6002 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6003}
6004
6005
6006/* Opcode VEX.F3.0F 0xd0 - invalid */
6007
6008
6009/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
6010FNIEMOP_DEF(iemOp_vaddsubps_Vps_Hps_Wps)
6011{
6012 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPS, vaddsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6013 IEMOPMEDIAF3_INIT_VARS( vaddsubps);
6014 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6015}
6016
6017
6018/* Opcode VEX.0F 0xd1 - invalid */
6019
6020
6021/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
6022FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
6023{
6024 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6025 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
6026 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6027}
6028
6029/* Opcode VEX.F3.0F 0xd1 - invalid */
6030/* Opcode VEX.F2.0F 0xd1 - invalid */
6031
6032/* Opcode VEX.0F 0xd2 - invalid */
6033/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
6034FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
6035{
6036 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6037 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
6038 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6039}
6040
6041/* Opcode VEX.F3.0F 0xd2 - invalid */
6042/* Opcode VEX.F2.0F 0xd2 - invalid */
6043
6044/* Opcode VEX.0F 0xd3 - invalid */
6045/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
6046FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
6047{
6048 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6049 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
6050 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6051}
6052
6053/* Opcode VEX.F3.0F 0xd3 - invalid */
6054/* Opcode VEX.F2.0F 0xd3 - invalid */
6055
6056/* Opcode VEX.0F 0xd4 - invalid */
6057
6058
6059/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
6060FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
6061{
6062 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6063 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
6064 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6065}
6066
6067
6068/* Opcode VEX.F3.0F 0xd4 - invalid */
6069/* Opcode VEX.F2.0F 0xd4 - invalid */
6070
6071/* Opcode VEX.0F 0xd5 - invalid */
6072
6073
6074/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
6075FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
6076{
6077 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6078 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
6079 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6080}
6081
6082
6083/* Opcode VEX.F3.0F 0xd5 - invalid */
6084/* Opcode VEX.F2.0F 0xd5 - invalid */
6085
6086/* Opcode VEX.0F 0xd6 - invalid */
6087
6088/**
6089 * @opcode 0xd6
6090 * @oppfx 0x66
6091 * @opcpuid avx
6092 * @opgroup og_avx_pcksclr_datamove
6093 * @opxcpttype none
6094 * @optest op1=-1 op2=2 -> op1=2
6095 * @optest op1=0 op2=-42 -> op1=-42
6096 */
6097FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
6098{
6099 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
6100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6101 if (IEM_IS_MODRM_REG_MODE(bRm))
6102 {
6103 /*
6104 * Register, register.
6105 */
6106 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6107 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6108
6109 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6110 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6111
6112 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
6113 IEM_GET_MODRM_REG(pVCpu, bRm));
6114 IEM_MC_ADVANCE_RIP_AND_FINISH();
6115 IEM_MC_END();
6116 }
6117 else
6118 {
6119 /*
6120 * Memory, register.
6121 */
6122 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6123 IEM_MC_LOCAL(uint64_t, uSrc);
6124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6125
6126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6127 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6128 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6129 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6130
6131 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
6132 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6133
6134 IEM_MC_ADVANCE_RIP_AND_FINISH();
6135 IEM_MC_END();
6136 }
6137}
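
/* Note: this is the store-direction counterpart of the VEX.F3.0F 0x7e form
   (vmovq Vq, Wq) above; both are VEX.L=0 only, and the register variant
   zero-extends the destination XMM register above bit 63. */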
6138
6139/* Opcode VEX.F3.0F 0xd6 - invalid */
6140/* Opcode VEX.F2.0F 0xd6 - invalid */
6141
6142
6143/* Opcode VEX.0F 0xd7 - invalid */
6144
6145/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
6146FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
6147{
6148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6149 /* Docs say register only. */
6150 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
6151 {
6152 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
6153 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
6154 if (pVCpu->iem.s.uVexLength)
6155 {
6156 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6157 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
6158 IEM_MC_ARG(uint64_t *, puDst, 0);
6159 IEM_MC_LOCAL(RTUINT256U, uSrc);
6160 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
6161 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6162 IEM_MC_PREPARE_AVX_USAGE();
6163 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6164 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6165 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
6166 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
6167 IEM_MC_ADVANCE_RIP_AND_FINISH();
6168 IEM_MC_END();
6169 }
6170 else
6171 {
6172 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6173 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6174 IEM_MC_ARG(uint64_t *, puDst, 0);
6175 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6176 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6177 IEM_MC_PREPARE_AVX_USAGE();
6178 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6179 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6180 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
6181 IEM_MC_ADVANCE_RIP_AND_FINISH();
6182 IEM_MC_END();
6183 }
6184 }
6185 else
6186 IEMOP_RAISE_INVALID_OPCODE_RET();
6187}
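
/* Regarding the 'lazy approach' note above: the destination is referenced as
   a 64-bit GREG and the mask helpers write all 64 bits with the mask
   zero-extended, which coincides with the architectural zero-extension of a
   32-bit GPR destination, so the high 32 bits need no separate handling. */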
6188
6189
6190/* Opcode VEX.F3.0F 0xd7 - invalid */
6191/* Opcode VEX.F2.0F 0xd7 - invalid */
6192
6193
6194/* Opcode VEX.0F 0xd8 - invalid */
6195
6196/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
6197FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
6198{
6199 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6200 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
6201 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6202}
6203
6204
6205/* Opcode VEX.F3.0F 0xd8 - invalid */
6206/* Opcode VEX.F2.0F 0xd8 - invalid */
6207
6208/* Opcode VEX.0F 0xd9 - invalid */
6209
6210
6211/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
6212FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
6213{
6214 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6215 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
6216 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6217}
6218
6219
6220/* Opcode VEX.F3.0F 0xd9 - invalid */
6221/* Opcode VEX.F2.0F 0xd9 - invalid */
6222
6223/* Opcode VEX.0F 0xda - invalid */
6224
6225
6226/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
6227FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
6228{
6229 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6230 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
6231 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6232}
6233
6234
6235/* Opcode VEX.F3.0F 0xda - invalid */
6236/* Opcode VEX.F2.0F 0xda - invalid */
6237
6238/* Opcode VEX.0F 0xdb - invalid */
6239
6240
6241/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
6242FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
6243{
6244 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6245 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6246 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
6247}
6248
6249
6250/* Opcode VEX.F3.0F 0xdb - invalid */
6251/* Opcode VEX.F2.0F 0xdb - invalid */
6252
6253/* Opcode VEX.0F 0xdc - invalid */
6254
6255
6256/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
6257FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
6258{
6259 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6260 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
6261 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6262}
6263
6264
6265/* Opcode VEX.F3.0F 0xdc - invalid */
6266/* Opcode VEX.F2.0F 0xdc - invalid */
6267
6268/* Opcode VEX.0F 0xdd - invalid */
6269
6270
6271/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
6272FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
6273{
6274 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6275 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
6276 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6277}
6278
6279
6280/* Opcode VEX.F3.0F 0xdd - invalid */
6281/* Opcode VEX.F2.0F 0xdd - invalid */
6282
6283/* Opcode VEX.0F 0xde - invalid */
6284
6285
6286/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
6287FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
6288{
6289 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6290 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
6291 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6292}
6293
6294
6295/* Opcode VEX.F3.0F 0xde - invalid */
6296/* Opcode VEX.F2.0F 0xde - invalid */
6297
6298/* Opcode VEX.0F 0xdf - invalid */
6299
6300
6301/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
6302FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
6303{
6304 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6305 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6306 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
6307}
6308
6309
6310/* Opcode VEX.F3.0F 0xdf - invalid */
6311/* Opcode VEX.F2.0F 0xdf - invalid */
6312
6313/* Opcode VEX.0F 0xe0 - invalid */
6314
6315
6316/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
6317FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
6318{
6319 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6320 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
6321 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6322}
6323
6324
6325/* Opcode VEX.F3.0F 0xe0 - invalid */
6326/* Opcode VEX.F2.0F 0xe0 - invalid */
6327
6328/* Opcode VEX.0F 0xe1 - invalid */
6329/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
6330FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
6331{
6332 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6333 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
6334 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6335}
6336
6337/* Opcode VEX.F3.0F 0xe1 - invalid */
6338/* Opcode VEX.F2.0F 0xe1 - invalid */
6339
6340/* Opcode VEX.0F 0xe2 - invalid */
6341/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
6342FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
6343{
6344 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6345 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
6346 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6347}
6348
6349/* Opcode VEX.F3.0F 0xe2 - invalid */
6350/* Opcode VEX.F2.0F 0xe2 - invalid */
6351
6352/* Opcode VEX.0F 0xe3 - invalid */
6353
6354
6355/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
6356FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
6357{
6358 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6359 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
6360 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6361}
6362
6363
6364/* Opcode VEX.F3.0F 0xe3 - invalid */
6365/* Opcode VEX.F2.0F 0xe3 - invalid */
6366
6367/* Opcode VEX.0F 0xe4 - invalid */
6368
6369
6370/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
6371FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
6372{
6373 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6374 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
6375 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6376}
6377
6378
6379/* Opcode VEX.F3.0F 0xe4 - invalid */
6380/* Opcode VEX.F2.0F 0xe4 - invalid */
6381
6382/* Opcode VEX.0F 0xe5 - invalid */
6383
6384
6385/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
6386FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
6387{
6388 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6389 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
6390 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6391}
6392
6393
6394/* Opcode VEX.F3.0F 0xe5 - invalid */
6395/* Opcode VEX.F2.0F 0xe5 - invalid */
6396
6397/* Opcode VEX.0F 0xe6 - invalid */
6398/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
6399FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
6400/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
6401FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
6402/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
6403FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
6404
6405
6406/* Opcode VEX.0F 0xe7 - invalid */
6407
6408/**
6409 * @opcode 0xe7
6410 * @opcodesub !11 mr/reg
6411 * @oppfx 0x66
6412 * @opcpuid avx
6413 * @opgroup og_avx_cachect
6414 * @opxcpttype 1
6415 * @optest op1=-1 op2=2 -> op1=2
6416 * @optest op1=0 op2=-42 -> op1=-42
6417 */
6418FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
6419{
6420 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6421 Assert(pVCpu->iem.s.uVexLength <= 1);
6422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6423 if (IEM_IS_MODRM_MEM_MODE(bRm))
6424 {
6425 if (pVCpu->iem.s.uVexLength == 0)
6426 {
6427 /*
6428 * 128-bit: Memory, register.
6429 */
6430 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6431 IEM_MC_LOCAL(RTUINT128U, uSrc);
6432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6433
6434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6435 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6436 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6437 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6438
6439 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
6440 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6441
6442 IEM_MC_ADVANCE_RIP_AND_FINISH();
6443 IEM_MC_END();
6444 }
6445 else
6446 {
6447 /*
6448 * 256-bit: Memory, register.
6449 */
6450 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6451 IEM_MC_LOCAL(RTUINT256U, uSrc);
6452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6453
6454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6455 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6456 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6457 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6458
6459 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6460 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6461
6462 IEM_MC_ADVANCE_RIP_AND_FINISH();
6463 IEM_MC_END();
6464 }
6465 }
6466 /**
6467 * @opdone
6468 * @opmnemonic udvex660fe7reg
6469 * @opcode 0xe7
6470 * @opcodesub 11 mr/reg
6471 * @oppfx 0x66
6472 * @opunused immediate
6473 * @opcpuid avx
6474 * @optest ->
6475 */
6476 else
6477 IEMOP_RAISE_INVALID_OPCODE_RET();
6478}
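
/*
 * The _ALIGN_SSE/_ALIGN_AVX stores above model the vmovntdq alignment rule:
 * a destination that is not naturally aligned to the operand size raises
 * \#GP(0).  A sketch of that predicate (hypothetical helper, not IEM's
 * actual alignment checking):
 */
DECLINLINE(bool) iemSketchIsVmovntdqAligned(RTGCPTR GCPtrMem, uint32_t cbOperand)
{
    /* cbOperand is 16 for the 128-bit form and 32 for the 256-bit form. */
    return !(GCPtrMem & (cbOperand - 1));
}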
6479
6480/* Opcode VEX.F3.0F 0xe7 - invalid */
6481/* Opcode VEX.F2.0F 0xe7 - invalid */
6482
6483
6484/* Opcode VEX.0F 0xe8 - invalid */
6485
6486
6487/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
6488FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
6489{
6490 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6491 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
6492 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6493}
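
/*
 * Sketch of the vpsubsb element operation: signed byte subtraction that
 * saturates instead of wrapping.  Hypothetical helper, not the real worker.
 */
DECLINLINE(int8_t) iemSketchPsubsbI8(int8_t iSrc1, int8_t iSrc2)
{
    int32_t const iResult = (int32_t)iSrc1 - iSrc2;
    if (iResult > INT8_MAX)
        return INT8_MAX;    /* 0x7f */
    if (iResult < INT8_MIN)
        return INT8_MIN;    /* 0x80 */
    return (int8_t)iResult;
}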
6494
6495
6496/* Opcode VEX.F3.0F 0xe8 - invalid */
6497/* Opcode VEX.F2.0F 0xe8 - invalid */
6498
6499/* Opcode VEX.0F 0xe9 - invalid */
6500
6501
6502/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
6503FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
6504{
6505 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6506 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
6507 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6508}
6509
6510
6511/* Opcode VEX.F3.0F 0xe9 - invalid */
6512/* Opcode VEX.F2.0F 0xe9 - invalid */
6513
6514/* Opcode VEX.0F 0xea - invalid */
6515
6516
6517/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
6518FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
6519{
6520 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6521 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
6522 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6523}
6524
6525
6526/* Opcode VEX.F3.0F 0xea - invalid */
6527/* Opcode VEX.F2.0F 0xea - invalid */
6528
6529/* Opcode VEX.0F 0xeb - invalid */
6530
6531
6532/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
6533FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
6534{
6535 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6536 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6537 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
6538}
6539
6540
6541
6542/* Opcode VEX.F3.0F 0xeb - invalid */
6543/* Opcode VEX.F2.0F 0xeb - invalid */
6544
6545/* Opcode VEX.0F 0xec - invalid */
6546
6547
6548/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
6549FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
6550{
6551 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6552 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
6553 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6554}
6555
6556
6557/* Opcode VEX.F3.0F 0xec - invalid */
6558/* Opcode VEX.F2.0F 0xec - invalid */
6559
6560/* Opcode VEX.0F 0xed - invalid */
6561
6562
6563/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
6564FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
6565{
6566 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6567 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
6568 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6569}
6570
6571
6572/* Opcode VEX.F3.0F 0xed - invalid */
6573/* Opcode VEX.F2.0F 0xed - invalid */
6574
6575/* Opcode VEX.0F 0xee - invalid */
6576
6577
6578/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
6579FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
6580{
6581 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6582 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
6583 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6584}
6585
6586
6587/* Opcode VEX.F3.0F 0xee - invalid */
6588/* Opcode VEX.F2.0F 0xee - invalid */
6589
6590
6591/* Opcode VEX.0F 0xef - invalid */
6592
6593
6594/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
6595FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
6596{
6597 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6598 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6599 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
6600}
6601
6602
6603/* Opcode VEX.F3.0F 0xef - invalid */
6604/* Opcode VEX.F2.0F 0xef - invalid */
6605
6606/* Opcode VEX.0F 0xf0 - invalid */
6607/* Opcode VEX.66.0F 0xf0 - invalid */
6608
6609
6610/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
6611FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
6612{
6613 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6614 Assert(pVCpu->iem.s.uVexLength <= 1);
6615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6616 if (IEM_IS_MODRM_REG_MODE(bRm))
6617 {
6618 /*
6619 * Register, register - (not implemented, assuming it raises \#UD).
6620 */
6621 IEMOP_RAISE_INVALID_OPCODE_RET();
6622 }
6623 else if (pVCpu->iem.s.uVexLength == 0)
6624 {
6625 /*
6626 * Register, memory128.
6627 */
6628 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6629 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6631
6632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6633 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6634 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6635 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6636
6637 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6638 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6639
6640 IEM_MC_ADVANCE_RIP_AND_FINISH();
6641 IEM_MC_END();
6642 }
6643 else
6644 {
6645 /*
6646 * Register, memory256.
6647 */
6648 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6649 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
6650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6651
6652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6653 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6654 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6655 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6656
6657 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6658 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
6659
6660 IEM_MC_ADVANCE_RIP_AND_FINISH();
6661 IEM_MC_END();
6662 }
6663}
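
/*
 * Guest-side reference (illustrative snippet, not part of the build): with
 * the AVX encoding vlddqu is simply an unaligned load, which is why the
 * fetches above use the _NO_AC variants and no alignment fault is raised.
 */
#if 0
# include <immintrin.h>
static __m128i LoadDquExample(void const *pv)
{
    return _mm_lddqu_si128((__m128i const *)pv); /* tolerates any alignment */
}
#endif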
6664
6665
6666/* Opcode VEX.0F 0xf1 - invalid */
6667/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
6668FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
6669{
6670 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6671 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
6672 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6673}
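
/*
 * Sketch of the vpsllw element operation: every word is shifted by the
 * count taken from the low 64 bits of the last operand, and counts above
 * 15 clear the element.  Hypothetical helper, not the real worker.
 */
DECLINLINE(uint16_t) iemSketchPsllwU16(uint16_t uSrc, uint64_t uCount)
{
    return uCount <= 15 ? (uint16_t)(uSrc << uCount) : 0;
}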
6674
6675/* Opcode VEX.F2.0F 0xf1 - invalid */
6676
6677/* Opcode VEX.0F 0xf2 - invalid */
6678/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
6679FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
6680{
6681 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6682 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
6683 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6684}
6685/* Opcode VEX.F2.0F 0xf2 - invalid */
6686
6687/* Opcode VEX.0F 0xf3 - invalid */
6688/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
6689FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
6690{
6691 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6692 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
6693 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6694}
6695/* Opcode VEX.F2.0F 0xf3 - invalid */
6696
6697/* Opcode VEX.0F 0xf4 - invalid */
6698
6699
6700/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
6701FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
6702{
6703 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6704 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
6705 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6706}
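
/*
 * Sketch of the vpmuludq lane operation: only the low dword of each qword
 * lane participates in the unsigned 32x32 -> 64-bit multiply.  Hypothetical
 * helper, not the real worker.
 */
DECLINLINE(uint64_t) iemSketchPmuludqU64(uint64_t uSrc1, uint64_t uSrc2)
{
    return (uint64_t)(uint32_t)uSrc1 * (uint32_t)uSrc2;
}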
6707
6708
6709/* Opcode VEX.F2.0F 0xf4 - invalid */
6710
6711/* Opcode VEX.0F 0xf5 - invalid */
6712
6713
6714/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
6715FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
6716{
6717 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6718 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
6719 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6720}
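
/*
 * Sketch of the vpmaddwd lane operation: the two signed word products of a
 * dword lane are summed into a signed dword.  Hypothetical helper, not the
 * real worker.
 */
DECLINLINE(int32_t) iemSketchPmaddwdI32(uint32_t uSrc1, uint32_t uSrc2)
{
    int32_t const iLo = (int32_t)(int16_t)(uSrc1 & 0xffff) * (int16_t)(uSrc2 & 0xffff);
    int32_t const iHi = (int32_t)(int16_t)(uSrc1 >> 16)    * (int16_t)(uSrc2 >> 16);
    /* Add as unsigned so the lone 0x8000*0x8000 + 0x8000*0x8000 case wraps
       to 0x80000000 without undefined behaviour. */
    return (int32_t)((uint32_t)iLo + (uint32_t)iHi);
}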
6721
6722
6723/* Opcode VEX.F2.0F 0xf5 - invalid */
6724
6725/* Opcode VEX.0F 0xf6 - invalid */
6726
6727
6728/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
6729FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
6730{
6731 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6732 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
6733 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6734}
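
/*
 * Sketch of the vpsadbw lane operation: the eight absolute byte differences
 * of a qword lane are summed and zero-extended into the destination qword.
 * Hypothetical helper, not the real worker.
 */
DECLINLINE(uint64_t) iemSketchPsadbwU64(uint64_t uSrc1, uint64_t uSrc2)
{
    uint32_t uSum = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        uint8_t const b1 = (uint8_t)(uSrc1 >> (iByte * 8));
        uint8_t const b2 = (uint8_t)(uSrc2 >> (iByte * 8));
        uSum += b1 >= b2 ? (uint32_t)(b1 - b2) : (uint32_t)(b2 - b1);
    }
    return uSum; /* at most 8 * 255 = 2040, so the upper lane bits are zero */
}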
6735
6736
6737/* Opcode VEX.F2.0F 0xf6 - invalid */
6738
6739/* Opcode VEX.0F 0xf7 - invalid */
6740
6741
6742/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
6743FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
6744{
6745// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
6746 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6747 if (IEM_IS_MODRM_REG_MODE(bRm))
6748 {
6749 /*
 6750 * XMM, XMM, (implicit) DI/EDI/RDI
6751 */
6752 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6753 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6754 IEM_MC_LOCAL( uint64_t, u64EffAddr);
6755 IEM_MC_LOCAL( RTUINT128U, u128Mem);
6756 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
6757 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
6758 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
6759 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6760 IEM_MC_PREPARE_AVX_USAGE();
6761
6762 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
6763 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
6764 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6765 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
6766 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
6767 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
6768
6769 IEM_MC_ADVANCE_RIP_AND_FINISH();
6770 IEM_MC_END();
6771 }
6772 else
6773 {
6774 /* The memory, register encoding is invalid. */
6775 IEMOP_RAISE_INVALID_OPCODE_RET();
6776 }
6777}
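
/*
 * Sketch of the byte selection performed by iemAImpl_maskmovdqu_u128 above
 * (hypothetical helper): the MSB of each mask byte picks the new source
 * byte, otherwise the byte read back from memory is kept, making the
 * operation a read-modify-write as emulated here.
 */
DECLINLINE(uint8_t) iemSketchMaskmovByte(uint8_t bMem, uint8_t bSrc, uint8_t bMsk)
{
    return (bMsk & 0x80) ? bSrc : bMem;
}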
6778
6779
6780/* Opcode VEX.F2.0F 0xf7 - invalid */
6781
6782/* Opcode VEX.0F 0xf8 - invalid */
6783
6784
6785/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
6786FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
6787{
6788 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6789 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
6790 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6791}
6792
6793
6794/* Opcode VEX.F2.0F 0xf8 - invalid */
6795
6796/* Opcode VEX.0F 0xf9 - invalid */
6797
6798
6799/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
6800FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
6801{
6802 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6803 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
6804 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6805}
6806
6807
6808/* Opcode VEX.F2.0F 0xf9 - invalid */
6809
6810/* Opcode VEX.0F 0xfa - invalid */
6811
6812
6813/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
6814FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
6815{
6816 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6817 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
6818 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6819}
6820
6821
6822/* Opcode VEX.F2.0F 0xfa - invalid */
6823
6824/* Opcode VEX.0F 0xfb - invalid */
6825
6826
6827/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
6828FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
6829{
6830 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6831 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
6832 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6833}
6834
6835
6836/* Opcode VEX.F2.0F 0xfb - invalid */
6837
6838/* Opcode VEX.0F 0xfc - invalid */
6839
6840
6841/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
6842FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
6843{
6844 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6845 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
6846 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6847}
6848
6849
6850/* Opcode VEX.F2.0F 0xfc - invalid */
6851
6852/* Opcode VEX.0F 0xfd - invalid */
6853
6854
6855/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
6856FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
6857{
6858 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6859 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
6860 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6861}
6862
6863
6864/* Opcode VEX.F2.0F 0xfd - invalid */
6865
6866/* Opcode VEX.0F 0xfe - invalid */
6867
6868
6869/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
6870FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
6871{
6872 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6873 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
6874 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6875}
6876
6877
6878/* Opcode VEX.F2.0F 0xfe - invalid */
6879
6880
6881/** Opcode **** 0x0f 0xff - UD0 */
6882FNIEMOP_DEF(iemOp_vud0)
6883{
6884/** @todo testcase: vud0 */
6885 IEMOP_MNEMONIC(vud0, "vud0");
6886 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
6887 {
6888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
6889 if (IEM_IS_MODRM_MEM_MODE(bRm))
6890 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
6891 }
6892 IEMOP_HLP_DONE_DECODING();
6893 IEMOP_RAISE_INVALID_OPCODE_RET();
6894}
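
/*
 * Decode note on the vendor check above: Intel documents ud0 as taking a
 * ModR/M byte (plus any displacement), while AMD's ud0 has no operands, so
 * only the Intel path consumes those bytes before raising \#UD.
 */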
6895
6896
6897
6898/**
6899 * VEX opcode map \#1.
6900 *
6901 * @sa g_apfnTwoByteMap
6902 */
6903const PFNIEMOP g_apfnVexMap1[] =
6904{
6905 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
6906 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
6907 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
6908 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
6909 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
6910 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
6911 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
6912 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
6913 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
6914 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
6915 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
6916 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
6917 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
6918 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
6919 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
6920 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
6921 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
6922
6923 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
6924 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
6925 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
6926 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6927 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6928 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6929 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
6930 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6931 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
6932 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
6933 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
6934 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
6935 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
6936 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
6937 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
6938 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
6939
6940 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
6941 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
6942 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
6943 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
6944 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
6945 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
6946 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
6947 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
6948 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6949 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6950 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
6951 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6952 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
6953 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
6954 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6955 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6956
6957 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
6958 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
6959 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
6960 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
6961 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
6962 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
6963 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
6964 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
6965 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6966 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6967 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6968 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6969 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6970 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6971 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6972 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
6973
6974 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
6975 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
6976 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
6977 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
6978 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
6979 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
6980 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
6981 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
6982 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
6983 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
6984 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
6985 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
6986 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
6987 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
6988 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
6989 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
6990
6991 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6992 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
6993 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
6994 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
6995 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6996 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6997 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6998 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6999 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7000 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7001 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7002 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7003 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7004 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7005 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7006 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7007
7008 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7009 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7010 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7011 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7012 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7013 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7014 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7015 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7016 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7017 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7018 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7019 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7020 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7021 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7022 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7023 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7024
7025 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7026 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7027 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7028 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7029 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7030 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7031 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7032 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7033 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
7034 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
7035 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
7036 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
7037 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7038 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7039 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7040 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7041
7042 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
7043 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
7044 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
7045 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
7046 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
7047 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
7048 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
7049 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
7050 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
7051 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
7052 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
7053 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
7054 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
7055 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
7056 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
7057 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
7058
7059 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
7060 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
7061 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
7062 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
7063 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
7064 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
7065 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
7066 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
7067 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
7068 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
7069 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
7070 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
7071 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
7072 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
7073 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
7074 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
7075
7076 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7077 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7078 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7079 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7080 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7081 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7082 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7083 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7084 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7085 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7086 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
7087 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
7088 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
7089 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
7090 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
7091 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
7092
7093 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7094 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7095 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7096 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7097 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7098 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7099 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7100 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7101 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7102 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7103 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
7104 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
7105 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
7106 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
7107 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
7108 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
7109
7110 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7111 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7112 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7113 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7114 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7115 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7116 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7117 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7118 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7119 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7120 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
7121 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
7122 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
7123 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
7124 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
7125 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
7126
7127 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7128 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7129 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7130 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7131 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7132 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7133 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7134 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7135 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7136 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7137 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7138 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7139 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7140 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7141 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7142 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7143
7144 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7145 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7146 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7147 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7148 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7149 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7150 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7151 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7152 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7153 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7154 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7155 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7156 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7157 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7158 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7159 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7160
7161 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7162 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7163 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7164 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7165 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7166 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7167 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7168 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7169 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7170 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7171 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7172 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7173 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7174 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7175 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7176 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
7177};
7178AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
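
/*
 * Sketch of how the VEX escape decoding is expected to index the map above
 * (illustrative, not the actual dispatch code): four entries per opcode
 * byte, one per VEX.pp prefix column.
 */
#if 0
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#endif
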
7179/** @} */
7180