VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@105351

Last change on this file since 105351 was 105351, checked in by vboxsync, 9 months ago:

VMM/IEM: Implement vcvttps2dq instruction emulation, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 287.3 KB
/* $Id: IEMAllInstVexMap1.cpp.h 105351 2024-07-16 11:16:51Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */
/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1, xmm2/mem128
 *     - vpxxx ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_LOCAL(X86YMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
            IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_LOCAL(X86YMMREG, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));

            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
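
/*
 * Editor's sketch, not part of the original file: a typical opcode handler
 * dispatches to the worker above with a host/fallback implementation table,
 * mirroring the IEMOPMEDIAOPTF3_INIT_VARS pattern used by vunpcklps further
 * down.  The vaddps handler shape and the IEMOPMEDIAF3_INIT_VARS macro are
 * assumptions for illustration only:
 *
 *     FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
 *         IEMOPMEDIAF3_INIT_VARS(vaddps);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */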


/**
 * Common worker for scalar AVX/AVX2 instructions on the forms (addss, subss, etc.):
 *     - vxxxss xmm0, xmm1, xmm2/mem32
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation.
 * Ignores VEX.L, from SDM:
 *     Software should ensure VADDSS is encoded with VEX.L=0.
 *     Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
 *     across different processor generations.
 */
FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
        IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
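
/*
 * Editor's sketch, not part of the original file: scalar handlers hand the
 * worker above a single U128/R32 implementation rather than a table.  The
 * vaddss handler shape and the iemAImpl_vaddss_* names are assumptions for
 * illustration:
 *
 *     FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vss, Hss, Wss, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
 *     }
 */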


/**
 * Common worker for scalar AVX/AVX2 instructions on the forms (addsd, subsd, etc.):
 *     - vxxxsd xmm0, xmm1, xmm2/mem64
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation.
 * Ignores VEX.L, from SDM:
 *     Software should ensure VADDSD is encoded with VEX.L=0.
 *     Encoding VADDSD with VEX.L=1 may encounter unpredictable behavior
 *     across different processor generations.
 */
FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R64, PFNIEMAIMPLFPAVXF3U128R64, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTFLOAT64U, pr64Src2, 2);
        IEM_MC_REF_XREG_R64_CONST(pr64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
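
/*
 * Editor's note, not part of the original file: the R64 worker above is used
 * the same way as the R32 worker, only with a PFNIEMAIMPLFPAVXF3U128R64
 * implementation (e.g. an assumed iemAImpl_vaddsd_u128_r64 /
 * iemAImpl_vaddsd_u128_r64_fallback pair for vaddsd).
 */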


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1, xmm2/mem128
 *     - vpxxx ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
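
/*
 * Editor's sketch, not part of the original file: handlers pass this worker
 * an IEMOPMEDIAOPTF3 table, either directly or via the thin _HighSrc/_LowSrc
 * wrappers below, exactly as the vunpcklps handler does further down:
 *
 *     IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 */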


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1/mem128
 *     - vpxxx ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX/AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1/mem128
 *     - vpxxx ymm0, ymm1/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
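
/*
 * Editor's note, not part of the original file: all workers above follow the
 * same microcode recipe, in order:
 *   1. decode ModR/M (and, for memory forms, calculate the effective address
 *      before the "done decoding" marker),
 *   2. IEMOP_HLP_DONE_VEX_DECODING_* - finish/validate the VEX prefix,
 *   3. IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT + IEM_MC_PREPARE_AVX_USAGE,
 *   4. fetch sources, call the implementation, and for FP workers raise any
 *      pending SIMD FP exception,
 *   5. store the result, zero-extending to VLMAX as VEX encodings require,
 *   6. IEM_MC_ADVANCE_RIP_AND_FINISH.
 */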



/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode      0x10
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, Memory
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, Memory
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x10
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, Memory  (this form loads the register from memory)
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, Memory
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x10
         * @oppfx       0xf3
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0  op3=2    -> op1=2
         * @optest      op1=0 op2=0  op3=-22  -> op1=0xffffffea
         * @optest      op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest      op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note        HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x10
         * @oppfx       0xf3
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovss_Vss_Hss_Wss
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x10
         * @oppfx       0xf2
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0  op3=2    -> op1=2
         * @optest      op1=0 op2=0  op3=-22  -> op1=0xffffffffffffffea
         * @optest      op1=3 op2=-1 op3=0x77 ->
         *              op1=0xffffffffffffffff0000000000000077
         * @optest      op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x10
         * @oppfx       0xf2
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
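
/*
 * Editor's note, not part of the original file: for the register form above,
 * bits 63:0 come from the U register (ModR/M.rm) and bits 127:64 from
 * VEX.vvvv, while the memory form instead zero-extends the 64-bit load
 * through bit 127.  Both forms clear YMM bits 255:128 (the ZX_VLMAX suffix).
 */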


/**
 * @opcode      0x11
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x11
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x11
         * @oppfx       0xf3
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0  op3=2    -> op1=2
         * @optest      op1=0 op2=0  op3=-22  -> op1=0xffffffea
         * @optest      op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest      op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x11
         * @oppfx       0xf3
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovss_Wss_Hss_Vss
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x11
         * @oppfx       0xf2
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0  op3=2    -> op1=2
         * @optest      op1=0 op2=0  op3=-22  -> op1=0xffffffffffffffea
         * @optest      op1=3 op2=-1 op3=0x77 ->
         *              op1=0xffffffffffffffff0000000000000077
         * @optest      op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x11
         * @oppfx       0xf2
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  7LZ
         * @optest      op2=0x2200220122022203
         *              op3=0x3304330533063307
         *              -> op1=0x22002201220222033304330533063307
         * @optest      op2=-1 op3=-42 -> op1=-42
         * @note        op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5LZ
         * @opfunction  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest      op1=1 op2=0 op3=0 -> op1=0
         * @optest      op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest      op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest      op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode      0x12
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamerge
 * @opxcpttype  5LZ
 * @optest      op2=0 op3=2 -> op1=2
 * @optest      op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest      op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *              -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
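
/*
 * Editor's note, not part of the original file: unlike the no-prefix 0x12
 * encoding (whose register form doubles as vmovhlps above), VEX.66.0F 0x12
 * has no register variant, so the 11 mr/reg case here decodes to #UD.
 */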


/**
 * @opcode      0x12
 * @oppfx       0xf3
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  4
 * @optest      vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *              -> op1=0x00000002000000020000000100000001
 * @optest      vex.l==1 /
 *              op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *              -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode      0x12
 * @oppfx       0xf2
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  5
 * @optest      vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *              -> op1=0x22222222111111112222222211111111
 * @optest      vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *              -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(uint64_t, uSrc1);
            IEM_MC_LOCAL(uint64_t, uSrc2);
            IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);

            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
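
/*
 * Editor's note, not part of the original file: vmovddup duplicates the low
 * qword of each 128-bit lane, whereas vmovsldup above duplicates the even
 * dwords; hence the qword store pattern 0,0,2,2 here versus the dword
 * pattern 0,0,2,2,4,4,6,6 above.
 */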


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     avx
 * @opgroup     og_avx_simdfp_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode      0x13
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_pcksclr_datamove
 * @opxcpttype  5
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */

1671/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1672FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1673{
1674 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1675 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1676 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1677}
1678
1679
1680/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1681FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1682{
1683 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1684 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1685 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1686}
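
/* For reference: the 0x15 forms take the high halves of each 128-bit lane
   instead (Intel SDM semantics): vunpckhps yields { src1[2], src2[2],
   src1[3], src2[3] } in dwords and vunpckhpd { src1[1], src2[1] } in
   qwords, per lane. */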
1687
1688
1689/* Opcode VEX.F3.0F 0x15 - invalid */
1690/* Opcode VEX.F2.0F 0x15 - invalid */
1691
1692
1693FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1694{
1695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1696 if (IEM_IS_MODRM_REG_MODE(bRm))
1697 {
1698 /**
1699 * @opcode 0x16
1700 * @opcodesub 11 mr/reg
1701 * @oppfx none
1702 * @opcpuid avx
1703 * @opgroup og_avx_simdfp_datamerge
1704 * @opxcpttype 7LZ
1705 */
1706 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1707
1708 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1709 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1710
1711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1712 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1713 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1714 IEM_GET_MODRM_RM(pVCpu, bRm),
1715 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1716
1717 IEM_MC_ADVANCE_RIP_AND_FINISH();
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 /**
1723 * @opdone
1724 * @opcode 0x16
1725 * @opcodesub !11 mr/reg
1726 * @oppfx none
1727 * @opcpuid avx
1728 * @opgroup og_avx_simdfp_datamove
1729 * @opxcpttype 5LZ
1730 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1731 */
1732 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1733
1734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1735 IEM_MC_LOCAL(uint64_t, uSrc);
1736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1737
1738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1739 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1741 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1742
1743 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1744 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1745 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1746 uSrc);
1747
1748 IEM_MC_ADVANCE_RIP_AND_FINISH();
1749 IEM_MC_END();
1750 }
1751}
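
/* Note: mod=3 decodes as vmovlhps (low qword of the destination from
   VEX.vvvv, high qword from the low qword of the r/m register), while a
   memory operand decodes as vmovhps (high qword loaded from memory). In
   both cases the _ZX_VLMAX merge above zeroes the destination bits above
   bit 127. */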
1752
1753
1754/**
1755 * @opcode 0x16
1756 * @opcodesub !11 mr/reg
1757 * @oppfx 0x66
1758 * @opcpuid avx
1759 * @opgroup og_avx_pcksclr_datamerge
1760 * @opxcpttype 5LZ
1761 */
1762FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1763{
1764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1765 if (IEM_IS_MODRM_MEM_MODE(bRm))
1766 {
1767 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1768
1769 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1770 IEM_MC_LOCAL(uint64_t, uSrc);
1771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1772
1773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1774 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1775 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1776 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1777
1778 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1779 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1780 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1781 uSrc);
1782
1783 IEM_MC_ADVANCE_RIP_AND_FINISH();
1784 IEM_MC_END();
1785 }
1786
1787 /**
1788 * @opdone
1789 * @opmnemonic udvex660f16m3
1790 * @opcode 0x16
1791 * @opcodesub 11 mr/reg
1792 * @oppfx 0x66
1793 * @opunused immediate
1794 * @opcpuid avx
1795 * @optest ->
1796 */
1797 else
1798 IEMOP_RAISE_INVALID_OPCODE_RET();
1799}
1800
1801
1802/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1803/**
1804 * @opcode 0x16
1805 * @oppfx 0xf3
1806 * @opcpuid avx
1807 * @opgroup og_avx_pcksclr_datamove
1808 * @opxcpttype 4
1809 */
1810FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1811{
1812 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1813 Assert(pVCpu->iem.s.uVexLength <= 1);
1814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1815 if (IEM_IS_MODRM_REG_MODE(bRm))
1816 {
1817 /*
1818 * Register, register.
1819 */
1820 if (pVCpu->iem.s.uVexLength == 0)
1821 {
1822 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1823 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1824 IEM_MC_LOCAL(RTUINT128U, uSrc);
1825
1826 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1827 IEM_MC_PREPARE_AVX_USAGE();
1828
1829 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1830 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1831 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1832 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1833 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1834 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1835
1836 IEM_MC_ADVANCE_RIP_AND_FINISH();
1837 IEM_MC_END();
1838 }
1839 else
1840 {
1841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1842 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1844 IEM_MC_PREPARE_AVX_USAGE();
1845
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1848 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1849 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1850 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1851 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1852 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1853 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1854 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1855 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1856 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1857
1858 IEM_MC_ADVANCE_RIP_AND_FINISH();
1859 IEM_MC_END();
1860 }
1861 }
1862 else
1863 {
1864 /*
1865 * Register, memory.
1866 */
1867 if (pVCpu->iem.s.uVexLength == 0)
1868 {
1869 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1870 IEM_MC_LOCAL(RTUINT128U, uSrc);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872
1873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1874 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1875 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1876 IEM_MC_PREPARE_AVX_USAGE();
1877
1878 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1879 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1880 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1881 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1882 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1883 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1884
1885 IEM_MC_ADVANCE_RIP_AND_FINISH();
1886 IEM_MC_END();
1887 }
1888 else
1889 {
1890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1893 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1895 IEM_MC_PREPARE_AVX_USAGE();
1896
1897 IEM_MC_LOCAL(RTUINT256U, uSrc);
1898 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1899
1900 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1901 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1902 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1903 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1904 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1905 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1906 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1907 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1908 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1909
1910 IEM_MC_ADVANCE_RIP_AND_FINISH();
1911 IEM_MC_END();
1912 }
1913 }
1914}
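
/* For reference (Intel SDM semantics): vmovshdup duplicates the
   odd-indexed source dwords, i.e. dst[i] = src[i | 1] for every dword
   index i, which is the 1,1,3,3(,5,5,7,7) store pattern used above. */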
1915
1916
1917/* Opcode VEX.F2.0F 0x16 - invalid */
1918
1919
1920/**
1921 * @opcode 0x17
1922 * @opcodesub !11 mr/reg
1923 * @oppfx none
1924 * @opcpuid avx
1925 * @opgroup og_avx_simdfp_datamove
1926 * @opxcpttype 5
1927 */
1928FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1929{
1930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1931 if (IEM_IS_MODRM_MEM_MODE(bRm))
1932 {
1933 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1934
1935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1936 IEM_MC_LOCAL(uint64_t, uSrc);
1937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1938
1939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1940 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1941 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1942 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1943
1944 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1945 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1946
1947 IEM_MC_ADVANCE_RIP_AND_FINISH();
1948 IEM_MC_END();
1949 }
1950
1951 /**
1952 * @opdone
1953 * @opmnemonic udvex0f17m3
1954 * @opcode 0x17
1955 * @opcodesub 11 mr/reg
1956 * @oppfx none
1957 * @opunused immediate
1958 * @opcpuid avx
1959 * @optest ->
1960 */
1961 else
1962 IEMOP_RAISE_INVALID_OPCODE_RET();
1963}
1964
1965
1966/**
1967 * @opcode 0x17
1968 * @opcodesub !11 mr/reg
1969 * @oppfx 0x66
1970 * @opcpuid avx
1971 * @opgroup og_avx_pcksclr_datamove
1972 * @opxcpttype 5
1973 */
1974FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1975{
1976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1977 if (IEM_IS_MODRM_MEM_MODE(bRm))
1978 {
1979 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1980
1981 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1982 IEM_MC_LOCAL(uint64_t, uSrc);
1983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1984
1985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1986 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1988 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1989
1990 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1991 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1992
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 }
1996
1997 /**
1998 * @opdone
1999 * @opmnemonic udvex660f17m3
2000 * @opcode 0x17
2001 * @opcodesub 11 mr/reg
2002 * @oppfx 0x66
2003 * @opunused immediate
2004 * @opcpuid avx
2005 * @optest ->
2006 */
2007 else
2008 IEMOP_RAISE_INVALID_OPCODE_RET();
2009}
2010
2011
2012/* Opcode VEX.F3.0F 0x17 - invalid */
2013/* Opcode VEX.F2.0F 0x17 - invalid */
2014
2015
2016/* Opcode VEX.0F 0x18 - invalid */
2017/* Opcode VEX.0F 0x19 - invalid */
2018/* Opcode VEX.0F 0x1a - invalid */
2019/* Opcode VEX.0F 0x1b - invalid */
2020/* Opcode VEX.0F 0x1c - invalid */
2021/* Opcode VEX.0F 0x1d - invalid */
2022/* Opcode VEX.0F 0x1e - invalid */
2023/* Opcode VEX.0F 0x1f - invalid */
2024
2025/* Opcode VEX.0F 0x20 - invalid */
2026/* Opcode VEX.0F 0x21 - invalid */
2027/* Opcode VEX.0F 0x22 - invalid */
2028/* Opcode VEX.0F 0x23 - invalid */
2029/* Opcode VEX.0F 0x24 - invalid */
2030/* Opcode VEX.0F 0x25 - invalid */
2031/* Opcode VEX.0F 0x26 - invalid */
2032/* Opcode VEX.0F 0x27 - invalid */
2033
2034/**
2035 * @opcode 0x28
2036 * @oppfx none
2037 * @opcpuid avx
2038 * @opgroup og_avx_pcksclr_datamove
2039 * @opxcpttype 1
2040 * @optest op1=1 op2=2 -> op1=2
2041 * @optest op1=0 op2=-42 -> op1=-42
2042 * @note Almost identical to vmovapd.
2043 */
2044FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2045{
2046 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2048 Assert(pVCpu->iem.s.uVexLength <= 1);
2049 if (IEM_IS_MODRM_REG_MODE(bRm))
2050 {
2051 /*
2052 * Register, register.
2053 */
2054 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2055 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2056
2057 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2058 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2059 if (pVCpu->iem.s.uVexLength == 0)
2060 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2061 IEM_GET_MODRM_RM(pVCpu, bRm));
2062 else
2063 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2064 IEM_GET_MODRM_RM(pVCpu, bRm));
2065 IEM_MC_ADVANCE_RIP_AND_FINISH();
2066 IEM_MC_END();
2067 }
2068 else
2069 {
2070 /*
2071 * Register, memory.
2072 */
2073 if (pVCpu->iem.s.uVexLength == 0)
2074 {
2075 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_LOCAL(RTUINT128U, uSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2083
2084 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090 else
2091 {
2092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2094 IEM_MC_LOCAL(RTUINT256U, uSrc);
2095
2096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2097 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2100
2101 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2102 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2103
2104 IEM_MC_ADVANCE_RIP_AND_FINISH();
2105 IEM_MC_END();
2106 }
2107 }
2108}
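
/* Note: the aligned fetch micro-ops used here (IEM_MC_FETCH_MEM_U128_ALIGN_SSE
   and IEM_MC_FETCH_MEM_U256_ALIGN_AVX) raise #GP(0) for operands that are
   not 16 resp. 32 byte aligned, matching the vmovaps alignment requirement. */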
2109
2110
2111/**
2112 * @opcode 0x28
2113 * @oppfx 0x66
2114 * @opcpuid avx
2115 * @opgroup og_avx_pcksclr_datamove
2116 * @opxcpttype 1
2117 * @optest op1=1 op2=2 -> op1=2
2118 * @optest op1=0 op2=-42 -> op1=-42
2119 * @note Almost identical to vmovaps.
2120 */
2121FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2122{
2123 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2125 Assert(pVCpu->iem.s.uVexLength <= 1);
2126 if (IEM_IS_MODRM_REG_MODE(bRm))
2127 {
2128 /*
2129 * Register, register.
2130 */
2131 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2132 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2133
2134 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2136 if (pVCpu->iem.s.uVexLength == 0)
2137 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2138 IEM_GET_MODRM_RM(pVCpu, bRm));
2139 else
2140 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2141 IEM_GET_MODRM_RM(pVCpu, bRm));
2142 IEM_MC_ADVANCE_RIP_AND_FINISH();
2143 IEM_MC_END();
2144 }
2145 else
2146 {
2147 /*
2148 * Register, memory.
2149 */
2150 if (pVCpu->iem.s.uVexLength == 0)
2151 {
2152 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2154 IEM_MC_LOCAL(RTUINT128U, uSrc);
2155
2156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2157 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2158 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2160
2161 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2162 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2163
2164 IEM_MC_ADVANCE_RIP_AND_FINISH();
2165 IEM_MC_END();
2166 }
2167 else
2168 {
2169 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171 IEM_MC_LOCAL(RTUINT256U, uSrc);
2172
2173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2174 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2175 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2176 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2177
2178 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2179 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 }
2185}
2186
2187/**
2188 * @opmnemonic udvexf30f28
2189 * @opcode 0x28
2190 * @oppfx 0xf3
2191 * @opunused vex.modrm
2192 * @opcpuid avx
2193 * @optest ->
2194 * @opdone
2195 */
2196
2197/**
2198 * @opmnemonic udvexf20f28
2199 * @opcode 0x28
2200 * @oppfx 0xf2
2201 * @opunused vex.modrm
2202 * @opcpuid avx
2203 * @optest ->
2204 * @opdone
2205 */
2206
2207/**
2208 * @opcode 0x29
2209 * @oppfx none
2210 * @opcpuid avx
2211 * @opgroup og_avx_pcksclr_datamove
2212 * @opxcpttype 1
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 * @note Almost identical to vmovapd.
2216 */
2217FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2218{
2219 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 if (IEM_IS_MODRM_REG_MODE(bRm))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2228 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2229
2230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2232 if (pVCpu->iem.s.uVexLength == 0)
2233 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2234 IEM_GET_MODRM_REG(pVCpu, bRm));
2235 else
2236 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2237 IEM_GET_MODRM_REG(pVCpu, bRm));
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 }
2241 else
2242 {
2243 /*
2244 * Register, memory.
2245 */
2246 if (pVCpu->iem.s.uVexLength == 0)
2247 {
2248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250 IEM_MC_LOCAL(RTUINT128U, uSrc);
2251
2252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2253 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2256
2257 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2258 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2259
2260 IEM_MC_ADVANCE_RIP_AND_FINISH();
2261 IEM_MC_END();
2262 }
2263 else
2264 {
2265 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2267 IEM_MC_LOCAL(RTUINT256U, uSrc);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2270 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2271 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2273
2274 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2275 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2276
2277 IEM_MC_ADVANCE_RIP_AND_FINISH();
2278 IEM_MC_END();
2279 }
2280 }
2281}
2282
2283/**
2284 * @opcode 0x29
2285 * @oppfx 0x66
2286 * @opcpuid avx
2287 * @opgroup og_avx_pcksclr_datamove
2288 * @opxcpttype 1
2289 * @optest op1=1 op2=2 -> op1=2
2290 * @optest op1=0 op2=-42 -> op1=-42
2291 * @note Almost identical to vmovaps.
2292 */
2293FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2294{
2295 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2296 Assert(pVCpu->iem.s.uVexLength <= 1);
2297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2298 if (IEM_IS_MODRM_REG_MODE(bRm))
2299 {
2300 /*
2301 * Register, register.
2302 */
2303 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2305
2306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2307 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2308 if (pVCpu->iem.s.uVexLength == 0)
2309 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2310 IEM_GET_MODRM_REG(pVCpu, bRm));
2311 else
2312 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2313 IEM_GET_MODRM_REG(pVCpu, bRm));
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 if (pVCpu->iem.s.uVexLength == 0)
2323 {
2324 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2326 IEM_MC_LOCAL(RTUINT128U, uSrc);
2327
2328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2332
2333 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2334 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2335
2336 IEM_MC_ADVANCE_RIP_AND_FINISH();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343 IEM_MC_LOCAL(RTUINT256U, uSrc);
2344
2345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2346 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2349
2350 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2351 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2352
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 }
2356 }
2357}
2358
2359
2360/**
2361 * @opmnemonic udvexf30f29
2362 * @opcode 0x29
2363 * @oppfx 0xf3
2364 * @opunused vex.modrm
2365 * @opcpuid avx
2366 * @optest ->
2367 * @opdone
2368 */
2369
2370/**
2371 * @opmnemonic udvexf20f29
2372 * @opcode 0x29
2373 * @oppfx 0xf2
2374 * @opunused vex.modrm
2375 * @opcpuid avx
2376 * @optest ->
2377 * @opdone
2378 */
2379
2380
2381/** Opcode VEX.0F 0x2a - invalid */
2382/** Opcode VEX.66.0F 0x2a - invalid */
2383
2384
2385/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2386FNIEMOP_DEF(iemOp_vcvtsi2ss_Vss_Hss_Ey)
2387{
2388 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SS, vcvtsi2ss, Vps, Hps, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2389 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2391 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2392 {
2393 if (IEM_IS_MODRM_REG_MODE(bRm))
2394 {
2395 /* XMM, greg64 */
2396 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2397 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2398 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2399 IEM_MC_PREPARE_AVX_USAGE();
2400
2401 IEM_MC_LOCAL(X86XMMREG, uDst);
2402 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2403 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2404 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2405 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2406 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2407 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2408 puDst, puSrc1, pi64Src2);
2409 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2410 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2411 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2412 IEM_MC_ADVANCE_RIP_AND_FINISH();
2413 IEM_MC_END();
2414 }
2415 else
2416 {
2417 /* XMM, [mem64] */
2418 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2421 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2422 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2423 IEM_MC_PREPARE_AVX_USAGE();
2424
2425 IEM_MC_LOCAL(X86XMMREG, uDst);
2426 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2427 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2428 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2429 IEM_MC_LOCAL(int64_t, i64Src2);
2430 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2431 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2432 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2433 puDst, puSrc1, pi64Src2);
2434 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2435 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2436 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2437 IEM_MC_ADVANCE_RIP_AND_FINISH();
2438 IEM_MC_END();
2439 }
2440 }
2441 else
2442 {
2443 if (IEM_IS_MODRM_REG_MODE(bRm))
2444 {
2445 /* XMM, greg32 */
2446 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2447 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2448 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2449 IEM_MC_PREPARE_AVX_USAGE();
2450
2451 IEM_MC_LOCAL(X86XMMREG, uDst);
2452 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2453 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2454 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2455 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2456 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2457 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2458 puDst, puSrc1, pi32Src2);
2459 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2460 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2461 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2462 IEM_MC_ADVANCE_RIP_AND_FINISH();
2463 IEM_MC_END();
2464 }
2465 else
2466 {
2467 /* XMM, [mem32] */
2468 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2472 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2473 IEM_MC_PREPARE_AVX_USAGE();
2474
2475 IEM_MC_LOCAL(X86XMMREG, uDst);
2476 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2477 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2478 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2479 IEM_MC_LOCAL(int32_t, i32Src2);
2480 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2481 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2482 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2483 puDst, puSrc1, pi32Src2);
2484 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2485 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2486 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2487 IEM_MC_ADVANCE_RIP_AND_FINISH();
2488 IEM_MC_END();
2489 }
2490 }
2491}
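
/* Note: VEX.W selects the integer source width for vcvtsi2ss/vcvtsi2sd:
   W=1 takes the int64_t paths above (64-bit mode only), W=0 the int32_t
   ones, and outside 64-bit mode the W bit is ignored altogether via
   IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT. */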
2492
2493
2494/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2495FNIEMOP_DEF(iemOp_vcvtsi2sd_Vsd_Hsd_Ey)
2496{
2497 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SD, vcvtsi2sd, Vpd, Hpd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2498 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2501 {
2502 if (IEM_IS_MODRM_REG_MODE(bRm))
2503 {
2504 /* XMM, greg64 */
2505 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2506 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2507 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2508 IEM_MC_PREPARE_AVX_USAGE();
2509
2510 IEM_MC_LOCAL(X86XMMREG, uDst);
2511 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2512 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2513 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2514 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2515 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2516 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2517 puDst, puSrc1, pi64Src2);
2518 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2519 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2520 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2521 IEM_MC_ADVANCE_RIP_AND_FINISH();
2522 IEM_MC_END();
2523 }
2524 else
2525 {
2526 /* XMM, [mem64] */
2527 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2530 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2532 IEM_MC_PREPARE_AVX_USAGE();
2533
2534 IEM_MC_LOCAL(X86XMMREG, uDst);
2535 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2536 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2537 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2538 IEM_MC_LOCAL(int64_t, i64Src2);
2539 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2540 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2541 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2542 puDst, puSrc1, pi64Src2);
2543 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2544 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2545 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2546 IEM_MC_ADVANCE_RIP_AND_FINISH();
2547 IEM_MC_END();
2548 }
2549 }
2550 else
2551 {
2552 if (IEM_IS_MODRM_REG_MODE(bRm))
2553 {
2554 /* XMM, greg32 */
2555 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2556 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2558 IEM_MC_PREPARE_AVX_USAGE();
2559
2560 IEM_MC_LOCAL(X86XMMREG, uDst);
2561 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2562 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2563 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2564 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2565 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2566 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2567 puDst, puSrc1, pi32Src2);
2568 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2569 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2570 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2571 IEM_MC_ADVANCE_RIP_AND_FINISH();
2572 IEM_MC_END();
2573 }
2574 else
2575 {
2576 /* XMM, [mem32] */
2577 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2580 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2581 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2582 IEM_MC_PREPARE_AVX_USAGE();
2583
2584 IEM_MC_LOCAL(X86XMMREG, uDst);
2585 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2586 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2587 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2588 IEM_MC_LOCAL(int32_t, i32Src2);
2589 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2590 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2591 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2592 puDst, puSrc1, pi32Src2);
2593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2594 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2595 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2596 IEM_MC_ADVANCE_RIP_AND_FINISH();
2597 IEM_MC_END();
2598 }
2599 }
2600}
2601
2602
2603/**
2604 * @opcode 0x2b
2605 * @opcodesub !11 mr/reg
2606 * @oppfx none
2607 * @opcpuid avx
2608 * @opgroup og_avx_cachect
2609 * @opxcpttype 1
2610 * @optest op1=1 op2=2 -> op1=2
2611 * @optest op1=0 op2=-42 -> op1=-42
2612 * @note Identical implementation to vmovntpd
2613 */
2614FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2615{
2616 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2617 Assert(pVCpu->iem.s.uVexLength <= 1);
2618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2619 if (IEM_IS_MODRM_MEM_MODE(bRm))
2620 {
2621 /*
2622 * Memory, register.
2623 */
2624 if (pVCpu->iem.s.uVexLength == 0)
2625 {
2626 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2627 IEM_MC_LOCAL(RTUINT128U, uSrc);
2628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2629
2630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2631 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2632 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2633 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2634
2635 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2636 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2637
2638 IEM_MC_ADVANCE_RIP_AND_FINISH();
2639 IEM_MC_END();
2640 }
2641 else
2642 {
2643 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2644 IEM_MC_LOCAL(RTUINT256U, uSrc);
2645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2646
2647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2648 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2649 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2650 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2651
2652 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2653 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658 }
2659 /* The register, register encoding is invalid. */
2660 else
2661 IEMOP_RAISE_INVALID_OPCODE_RET();
2662}
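
/* Note: vmovntps/vmovntpd exist only with a memory destination (the
   register form is #UD above). The non-temporal cache hint itself is not
   modelled by these micro-ops; the store behaves like a normal aligned
   store here. */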
2663
2664/**
2665 * @opcode 0x2b
2666 * @opcodesub !11 mr/reg
2667 * @oppfx 0x66
2668 * @opcpuid avx
2669 * @opgroup og_avx_cachect
2670 * @opxcpttype 1
2671 * @optest op1=1 op2=2 -> op1=2
2672 * @optest op1=0 op2=-42 -> op1=-42
2673 * @note Identical implementation to vmovntps
2674 */
2675FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2676{
2677 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2678 Assert(pVCpu->iem.s.uVexLength <= 1);
2679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2680 if (IEM_IS_MODRM_MEM_MODE(bRm))
2681 {
2682 /*
2683 * Memory, register.
2684 */
2685 if (pVCpu->iem.s.uVexLength == 0)
2686 {
2687 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2688 IEM_MC_LOCAL(RTUINT128U, uSrc);
2689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2690
2691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2692 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2694 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2695
2696 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2697 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2698
2699 IEM_MC_ADVANCE_RIP_AND_FINISH();
2700 IEM_MC_END();
2701 }
2702 else
2703 {
2704 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2705 IEM_MC_LOCAL(RTUINT256U, uSrc);
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2707
2708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2709 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2710 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2712
2713 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2714 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2715
2716 IEM_MC_ADVANCE_RIP_AND_FINISH();
2717 IEM_MC_END();
2718 }
2719 }
2720 /* The register, register encoding is invalid. */
2721 else
2722 IEMOP_RAISE_INVALID_OPCODE_RET();
2723}
2724
2725/**
2726 * @opmnemonic udvexf30f2b
2727 * @opcode 0x2b
2728 * @oppfx 0xf3
2729 * @opunused vex.modrm
2730 * @opcpuid avx
2731 * @optest ->
2732 * @opdone
2733 */
2734
2735/**
2736 * @opmnemonic udvexf20f2b
2737 * @opcode 0x2b
2738 * @oppfx 0xf2
2739 * @opunused vex.modrm
2740 * @opcpuid avx
2741 * @optest ->
2742 * @opdone
2743 */
2744
2745
2746/* Opcode VEX.0F 0x2c - invalid */
2747/* Opcode VEX.66.0F 0x2c - invalid */
2748
2749#define IEMOP_VCVTXSS2SI_Gy_Wss_BODY(a_Instr) \
2750 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2752 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2753 { \
2754 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2755 { \
2756 /* greg64, XMM */ \
2757 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2758 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2759 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2760 IEM_MC_PREPARE_AVX_USAGE(); \
2761 IEM_MC_LOCAL( int64_t, i64Dst); \
2762 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2763 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2764 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2765 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2766 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2767 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2768 pi64Dst, pr32Src); \
2769 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2770 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2771 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2772 IEM_MC_END(); \
2773 } \
2774 else \
2775 { \
2776 /* greg64, [mem32] */ \
2777 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2780 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2781 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2782 IEM_MC_PREPARE_AVX_USAGE(); \
2783 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2784 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2785 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2786 IEM_MC_LOCAL( int64_t, i64Dst); \
2787 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2788 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2789 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2790 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2791 pi64Dst, pr32Src); \
2792 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2793 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2795 IEM_MC_END(); \
2796 } \
2797 } \
2798 else \
2799 { \
2800 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2801 { \
2802 /* greg, XMM */ \
2803 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2804 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2805 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2806 IEM_MC_PREPARE_AVX_USAGE(); \
2807 IEM_MC_LOCAL( int32_t, i32Dst); \
2808 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2809 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2810 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2811 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2812 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2813 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2814 pi32Dst, pr32Src); \
2815 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2816 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2817 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2818 IEM_MC_END(); \
2819 } \
2820 else \
2821 { \
2822 /* greg, [mem32] */ \
2823 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2826 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2827 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2828 IEM_MC_PREPARE_AVX_USAGE(); \
2829 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2830 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2831 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2832 IEM_MC_LOCAL( int32_t, i32Dst); \
2833 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2834 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2835 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2836 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2837 pi32Dst, pr32Src); \
2838 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2839 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2840 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2841 IEM_MC_END(); \
2842 } \
2843 } \
2844 (void)0
2845
2846
2847#define IEMOP_VCVTXSD2SI_Gy_Wsd_BODY(a_Instr) \
2848 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2850 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2851 { \
2852 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2853 { \
2854 /* greg64, XMM */ \
2855 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2856 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2857 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2858 IEM_MC_PREPARE_AVX_USAGE(); \
2859 IEM_MC_LOCAL( int64_t, i64Dst); \
2860 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2861 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2862 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2863 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2864 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2865 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2866 pi64Dst, pr64Src); \
2867 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2868 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2869 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2870 IEM_MC_END(); \
2871 } \
2872 else \
2873 { \
2874 /* greg64, [mem64] */ \
2875 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2876 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2878 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2879 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2880 IEM_MC_PREPARE_AVX_USAGE(); \
2881 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2882 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2883 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2884 IEM_MC_LOCAL( int64_t, i64Dst); \
2885 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2886 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2887 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2888 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2889 pi64Dst, pr64Src); \
2890 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2891 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2892 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2893 IEM_MC_END(); \
2894 } \
2895 } \
2896 else \
2897 { \
2898 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2899 { \
2900 /* greg, XMM */ \
2901 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2902 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2903 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2904 IEM_MC_PREPARE_AVX_USAGE(); \
2905 IEM_MC_LOCAL( int32_t, i32Dst); \
2906 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2907 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2908 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2909 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2910 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2911 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2912 pi32Dst, pr64Src); \
2913 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2914 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2915 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2916 IEM_MC_END(); \
2917 } \
2918 else \
2919 { \
2920 /* greg, [mem64] */ \
2921 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2924 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2925 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2926 IEM_MC_PREPARE_AVX_USAGE(); \
2927 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2928 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2929 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2930 IEM_MC_LOCAL( int32_t, i32Dst); \
2931 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2932 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2933 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2934 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2935 pi32Dst, pr64Src); \
2936 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2937 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2938 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2939 IEM_MC_END(); \
2940 } \
2941 } \
2942 (void)0
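
/* The two macro bodies above expand the usual four decode paths (register
   or memory source times 32 or 64-bit destination) shared by the scalar
   float-to-integer conversions; the trailing (void)0 merely lets the
   invocation site terminate the macro with a semicolon. */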
2943
2944
2945/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2946FNIEMOP_DEF(iemOp_vcvttss2si_Gy_Wss)
2947{
2948 IEMOP_MNEMONIC2(VEX_RM, VCVTTSS2SI, vcvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2949 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvttss2si);
2950}
2951
2952
2953/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2954FNIEMOP_DEF(iemOp_vcvttsd2si_Gy_Wsd)
2955{
2956 IEMOP_MNEMONIC2(VEX_RM, VCVTTSD2SI, vcvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2957 IEMOP_VCVTXSD2SI_Gy_Wsd_BODY( vcvttsd2si);
2958}
2959
2960
2961/* Opcode VEX.0F 0x2d - invalid */
2962/* Opcode VEX.66.0F 0x2d - invalid */
2963
2964
2965/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2966FNIEMOP_DEF(iemOp_vcvtss2si_Gy_Wss)
2967{
2968 IEMOP_MNEMONIC2(VEX_RM, VCVTSS2SI, vcvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2969 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvtss2si);
2970}
2971
2972
2973/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2974FNIEMOP_DEF(iemOp_vcvtsd2si_Gy_Wsd)
2975{
2976 IEMOP_MNEMONIC2(VEX_RM, VCVTSD2SI, vcvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2977 IEMOP_VCVTXSD2SI_Gy_Wsd_BODY( vcvtsd2si);
2978}
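
/* For reference (Intel SDM semantics): the 0x2c forms (vcvttss2si,
   vcvttsd2si) truncate toward zero while the 0x2d forms (vcvtss2si,
   vcvtsd2si) round according to MXCSR.RC; an invalid conversion (NaN or
   out-of-range result) yields the integer indefinite value (0x80000000
   resp. 0x8000000000000000) when the #I exception is masked. */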
2979
2980
2981
2982/**
2983 * @opcode 0x2e
2984 * @oppfx none
2985 * @opflmodify cf,pf,af,zf,sf,of
2986 * @opflclear af,sf,of
2987 */
2988FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2989{
2990 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2992 if (IEM_IS_MODRM_REG_MODE(bRm))
2993 {
2994 /*
2995 * Register, register.
2996 */
2997 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2998 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2999 IEM_MC_LOCAL(uint32_t, fEFlags);
3000 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3001 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3002 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3003 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3004 IEM_MC_PREPARE_AVX_USAGE();
3005 IEM_MC_FETCH_EFLAGS(fEFlags);
3006 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3007 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
3008 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
3009 pEFlags, uSrc1, uSrc2);
3010 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3011 IEM_MC_COMMIT_EFLAGS(fEFlags);
3012
3013 IEM_MC_ADVANCE_RIP_AND_FINISH();
3014 IEM_MC_END();
3015 }
3016 else
3017 {
3018 /*
3019 * Register, memory.
3020 */
3021 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3022 IEM_MC_LOCAL(uint32_t, fEFlags);
3023 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3024 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3025 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3027
3028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3029 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3031 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3032
3033 IEM_MC_PREPARE_AVX_USAGE();
3034 IEM_MC_FETCH_EFLAGS(fEFlags);
3035 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3036 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
3037 pEFlags, uSrc1, uSrc2);
3038 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3039 IEM_MC_COMMIT_EFLAGS(fEFlags);
3040
3041 IEM_MC_ADVANCE_RIP_AND_FINISH();
3042 IEM_MC_END();
3043 }
3044}
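
/* For reference (Intel SDM semantics), (v)ucomiss reports the compare
   result as:
       unordered: ZF=1, PF=1, CF=1
       greater:   ZF=0, PF=0, CF=0
       less:      ZF=0, PF=0, CF=1
       equal:     ZF=1, PF=0, CF=0
   with OF, AF and SF cleared, matching the @opflmodify/@opflclear notes. */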
3045
3046
3047/**
3048 * @opcode 0x2e
3049 * @oppfx 0x66
3050 * @opflmodify cf,pf,af,zf,sf,of
3051 * @opflclear af,sf,of
3052 */
3053FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
3054{
3055 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3057 if (IEM_IS_MODRM_REG_MODE(bRm))
3058 {
3059 /*
3060 * Register, register.
3061 */
3062 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3063 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3064 IEM_MC_LOCAL(uint32_t, fEFlags);
3065 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3066 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3067 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3068 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3069 IEM_MC_PREPARE_AVX_USAGE();
3070 IEM_MC_FETCH_EFLAGS(fEFlags);
3071 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3072 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3073 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3074 pEFlags, uSrc1, uSrc2);
3075 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3076 IEM_MC_COMMIT_EFLAGS(fEFlags);
3077
3078 IEM_MC_ADVANCE_RIP_AND_FINISH();
3079 IEM_MC_END();
3080 }
3081 else
3082 {
3083 /*
3084 * Register, memory.
3085 */
3086 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3087 IEM_MC_LOCAL(uint32_t, fEFlags);
3088 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3089 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3090 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3092
3093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3094 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3095 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3096 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3097
3098 IEM_MC_PREPARE_AVX_USAGE();
3099 IEM_MC_FETCH_EFLAGS(fEFlags);
3100 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3101 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3102 pEFlags, uSrc1, uSrc2);
3103 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3104 IEM_MC_COMMIT_EFLAGS(fEFlags);
3105
3106 IEM_MC_ADVANCE_RIP_AND_FINISH();
3107 IEM_MC_END();
3108 }
3109}
3110
3111
3112/* Opcode VEX.F3.0F 0x2e - invalid */
3113/* Opcode VEX.F2.0F 0x2e - invalid */
3114
3115/**
3116 * @opcode 0x2f
3117 * @oppfx none
3118 * @opflmodify cf,pf,af,zf,sf,of
3119 * @opflclear af,sf,of
3120 */
3121FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
3122{
3123 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if (IEM_IS_MODRM_REG_MODE(bRm))
3126 {
3127 /*
3128 * Register, register.
3129 */
3130 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3131 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3132 IEM_MC_LOCAL(uint32_t, fEFlags);
3133 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3134 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3135 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3136 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3137 IEM_MC_PREPARE_AVX_USAGE();
3138 IEM_MC_FETCH_EFLAGS(fEFlags);
3139 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3140 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
3141 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3142 pEFlags, uSrc1, uSrc2);
3143 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3144 IEM_MC_COMMIT_EFLAGS(fEFlags);
3145
3146 IEM_MC_ADVANCE_RIP_AND_FINISH();
3147 IEM_MC_END();
3148 }
3149 else
3150 {
3151 /*
3152 * Register, memory.
3153 */
3154 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3155 IEM_MC_LOCAL(uint32_t, fEFlags);
3156 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3157 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3158 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3160
3161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3162 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3163 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3164 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3165
3166 IEM_MC_PREPARE_AVX_USAGE();
3167 IEM_MC_FETCH_EFLAGS(fEFlags);
3168 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3169 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3170 pEFlags, uSrc1, uSrc2);
3171 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3172 IEM_MC_COMMIT_EFLAGS(fEFlags);
3173
3174 IEM_MC_ADVANCE_RIP_AND_FINISH();
3175 IEM_MC_END();
3176 }
3177}
3178
3179
3180/**
3181 * @opcode 0x2f
3182 * @oppfx 0x66
3183 * @opflmodify cf,pf,af,zf,sf,of
3184 * @opflclear af,sf,of
3185 */
3186FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
3187{
3188 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3190 if (IEM_IS_MODRM_REG_MODE(bRm))
3191 {
3192 /*
3193 * Register, register.
3194 */
3195 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3196 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3197 IEM_MC_LOCAL(uint32_t, fEFlags);
3198 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3199 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3200 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3201 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3202 IEM_MC_PREPARE_AVX_USAGE();
3203 IEM_MC_FETCH_EFLAGS(fEFlags);
3204 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3205 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3206 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3207 pEFlags, uSrc1, uSrc2);
3208 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3209 IEM_MC_COMMIT_EFLAGS(fEFlags);
3210
3211 IEM_MC_ADVANCE_RIP_AND_FINISH();
3212 IEM_MC_END();
3213 }
3214 else
3215 {
3216 /*
3217 * Register, memory.
3218 */
3219 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3220 IEM_MC_LOCAL(uint32_t, fEFlags);
3221 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3222 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3223 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3225
3226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3227 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3228 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3229 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3230
3231 IEM_MC_PREPARE_AVX_USAGE();
3232 IEM_MC_FETCH_EFLAGS(fEFlags);
3233 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3234 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3235 pEFlags, uSrc1, uSrc2);
3236 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3237 IEM_MC_COMMIT_EFLAGS(fEFlags);
3238
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242}
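
/* Note: vcomiss/vcomisd differ from the vucomiss/vucomisd pair above only
   in NaN handling (Intel SDM semantics): the comis forms signal #IA for
   any NaN operand, the ucomis forms only for signalling NaNs; QNaNs just
   produce the unordered flag combination. */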
3243
3244
3245/* Opcode VEX.F3.0F 0x2f - invalid */
3246/* Opcode VEX.F2.0F 0x2f - invalid */
3247
3248/* Opcode VEX.0F 0x30 - invalid */
3249/* Opcode VEX.0F 0x31 - invalid */
3250/* Opcode VEX.0F 0x32 - invalid */
3251/* Opcode VEX.0F 0x33 - invalid */
3252/* Opcode VEX.0F 0x34 - invalid */
3253/* Opcode VEX.0F 0x35 - invalid */
3254/* Opcode VEX.0F 0x36 - invalid */
3255/* Opcode VEX.0F 0x37 - invalid */
3256/* Opcode VEX.0F 0x38 - invalid */
3257/* Opcode VEX.0F 0x39 - invalid */
3258/* Opcode VEX.0F 0x3a - invalid */
3259/* Opcode VEX.0F 0x3b - invalid */
3260/* Opcode VEX.0F 0x3c - invalid */
3261/* Opcode VEX.0F 0x3d - invalid */
3262/* Opcode VEX.0F 0x3e - invalid */
3263/* Opcode VEX.0F 0x3f - invalid */
3264/* Opcode VEX.0F 0x40 - invalid */
3265/* Opcode VEX.0F 0x41 - invalid */
3266/* Opcode VEX.0F 0x42 - invalid */
3267/* Opcode VEX.0F 0x43 - invalid */
3268/* Opcode VEX.0F 0x44 - invalid */
3269/* Opcode VEX.0F 0x45 - invalid */
3270/* Opcode VEX.0F 0x46 - invalid */
3271/* Opcode VEX.0F 0x47 - invalid */
3272/* Opcode VEX.0F 0x48 - invalid */
3273/* Opcode VEX.0F 0x49 - invalid */
3274/* Opcode VEX.0F 0x4a - invalid */
3275/* Opcode VEX.0F 0x4b - invalid */
3276/* Opcode VEX.0F 0x4c - invalid */
3277/* Opcode VEX.0F 0x4d - invalid */
3278/* Opcode VEX.0F 0x4e - invalid */
3279/* Opcode VEX.0F 0x4f - invalid */
3280
3281
3282/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
3283FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
3284{
3285 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3287 if (IEM_IS_MODRM_REG_MODE(bRm))
3288 {
3289 /*
3290 * Register, register.
3291 */
3292 if (pVCpu->iem.s.uVexLength == 0)
3293 {
3294 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3295 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3296 IEM_MC_LOCAL(uint8_t, u8Dst);
3297 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3298 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3299 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3300 IEM_MC_PREPARE_AVX_USAGE();
3301 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3302 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
3303 pu8Dst, puSrc);
3304 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3305 IEM_MC_ADVANCE_RIP_AND_FINISH();
3306 IEM_MC_END();
3307 }
3308 else
3309 {
3310 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3311 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3312 IEM_MC_LOCAL(uint8_t, u8Dst);
3313 IEM_MC_LOCAL(RTUINT256U, uSrc);
3314 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3315 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3316
3317 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3318 IEM_MC_PREPARE_AVX_USAGE();
3319 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3320 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
3321 pu8Dst, puSrc);
3322 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3323 IEM_MC_ADVANCE_RIP_AND_FINISH();
3324 IEM_MC_END();
3325 }
3326 }
3327 /* No memory operand. */
3328 else
3329 IEMOP_RAISE_INVALID_OPCODE_RET();
3330}
3331
3332
3333/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
3334FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
3335{
3336 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3338 if (IEM_IS_MODRM_REG_MODE(bRm))
3339 {
3340 /*
3341 * Register, register.
3342 */
3343 if (pVCpu->iem.s.uVexLength == 0)
3344 {
3345 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3346 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3347 IEM_MC_LOCAL(uint8_t, u8Dst);
3348 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3349 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3350 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3351 IEM_MC_PREPARE_AVX_USAGE();
3352 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3353 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
3354 pu8Dst, puSrc);
3355 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3356 IEM_MC_ADVANCE_RIP_AND_FINISH();
3357 IEM_MC_END();
3358 }
3359 else
3360 {
3361 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3362 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3363 IEM_MC_LOCAL(uint8_t, u8Dst);
3364 IEM_MC_LOCAL(RTUINT256U, uSrc);
3365 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3366 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3367
3368 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3369 IEM_MC_PREPARE_AVX_USAGE();
3370 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3371 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
3372 pu8Dst, puSrc);
3373 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3374 IEM_MC_ADVANCE_RIP_AND_FINISH();
3375 IEM_MC_END();
3376 }
3377 }
3378 /* No memory operand. */
3379 else
3380 IEMOP_RAISE_INVALID_OPCODE_RET();
3381}
3382
3383
3384/* Opcode VEX.F3.0F 0x50 - invalid */
3385/* Opcode VEX.F2.0F 0x50 - invalid */
3386
3387/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
3388FNIEMOP_DEF(iemOp_vsqrtps_Vps_Wps)
3389{
3390 IEMOP_MNEMONIC2(VEX_RM, VSQRTPS, vsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3391 IEMOPMEDIAF2_INIT_VARS( vsqrtps);
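    /* IEMOPMEDIAF2_INIT_VARS sets up the s_Host/s_Fallback implementation tables; IEM_SELECT_HOST_OR_FALLBACK then picks the host-assisted variant when the host CPU has AVX, the C fallback otherwise. */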
3392 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3393}
3394
3395
3396/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
3397FNIEMOP_DEF(iemOp_vsqrtpd_Vpd_Wpd)
3398{
3399 IEMOP_MNEMONIC2(VEX_RM, VSQRTPD, vsqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3400 IEMOPMEDIAF2_INIT_VARS( vsqrtpd);
3401 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3402}
3403
3404
3405/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
3406FNIEMOP_DEF(iemOp_vsqrtss_Vss_Hss_Wss)
3407{
3408 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSS, vsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3409 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3410 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback));
3411}
3412
3413
3414/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
3415FNIEMOP_DEF(iemOp_vsqrtsd_Vsd_Hsd_Wsd)
3416{
3417 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSD, vsqrtsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3418 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3419 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback));
3420}
3421
3422
3423/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
3424FNIEMOP_DEF(iemOp_vrsqrtps_Vps_Wps)
3425{
3426 IEMOP_MNEMONIC2(VEX_RM, VRSQRTPS, vrsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3427 IEMOPMEDIAF2_INIT_VARS( vrsqrtps);
3428 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3429}
3430
3431
3432/* Opcode VEX.66.0F 0x52 - invalid */
3433
3434
3435/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
3436FNIEMOP_DEF(iemOp_vrsqrtss_Vss_Hss_Wss)
3437{
3438 IEMOP_MNEMONIC3(VEX_RVM, VRSQRTSS, vrsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3439 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3440 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback));
3441}
3442
3443
3444/* Opcode VEX.F2.0F 0x52 - invalid */
3445
3446
3447/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
3448FNIEMOP_DEF(iemOp_vrcpps_Vps_Wps)
3449{
3450 IEMOP_MNEMONIC2(VEX_RM, VRCPPS, vrcpps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3451 IEMOPMEDIAF2_INIT_VARS( vrcpps);
3452 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3453}
3454
3455
3456/* Opcode VEX.66.0F 0x53 - invalid */
3457
3458
3459/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
3460FNIEMOP_DEF(iemOp_vrcpss_Vss_Hss_Wss)
3461{
3462 IEMOP_MNEMONIC3(VEX_RVM, VRCPSS, vrcpss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3463 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3464 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback));
3465}
3466
3467
3468/* Opcode VEX.F2.0F 0x53 - invalid */
3469
3470
3471/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
3472FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
3473{
3474 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
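    /* Bitwise AND is the same operation for float and integer lanes, so the vpand worker tables are reused here. */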
3475 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3476 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3477}
3478
3479
3480/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
3481FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
3482{
3483 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3484 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3485 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3486}
3487
3488
3489/* Opcode VEX.F3.0F 0x54 - invalid */
3490/* Opcode VEX.F2.0F 0x54 - invalid */
3491
3492
3493/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
3494FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
3495{
3496 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3497 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3498 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3499}
3500
3501
3502/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
3503FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
3504{
3505 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3506 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3507 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3508}
3509
3510
3511/* Opcode VEX.F3.0F 0x55 - invalid */
3512/* Opcode VEX.F2.0F 0x55 - invalid */
3513
3514/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
3515FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
3516{
3517 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3518 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3519 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3520}
3521
3522
3523/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
3524FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
3525{
3526 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3527 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3528 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3529}
3530
3531
3532/* Opcode VEX.F3.0F 0x56 - invalid */
3533/* Opcode VEX.F2.0F 0x56 - invalid */
3534
3535
3536/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
3537FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
3538{
3539 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3540 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3541 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3542}
3543
3544
3545/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
3546FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
3547{
3548 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3549 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3550 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3551}
3552
3553
3554/* Opcode VEX.F3.0F 0x57 - invalid */
3555/* Opcode VEX.F2.0F 0x57 - invalid */
3556
3557
3558/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
3559FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
3560{
3561 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3562 IEMOPMEDIAF3_INIT_VARS( vaddps);
3563 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3564}
3565
3566
3567/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
3568FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
3569{
3570 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3571 IEMOPMEDIAF3_INIT_VARS( vaddpd);
3572 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3573}
3574
3575
3576/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
3577FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
3578{
3579 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3580 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3581 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
3582}
3583
3584
3585/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
3586FNIEMOP_DEF(iemOp_vaddsd_Vsd_Hsd_Wsd)
3587{
3588 IEMOP_MNEMONIC3(VEX_RVM, VADDSD, vaddsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3589 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3590 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback));
3591}
3592
3593
3594/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
3595FNIEMOP_DEF(iemOp_vmulps_Vps_Hps_Wps)
3596{
3597 IEMOP_MNEMONIC3(VEX_RVM, VMULPS, vmulps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3598 IEMOPMEDIAF3_INIT_VARS( vmulps);
3599 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3600}
3601
3602
3603/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
3604FNIEMOP_DEF(iemOp_vmulpd_Vpd_Hpd_Wpd)
3605{
3606 IEMOP_MNEMONIC3(VEX_RVM, VMULPD, vmulpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3607 IEMOPMEDIAF3_INIT_VARS( vmulpd);
3608 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3609}
3610
3611
3612/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
3613FNIEMOP_DEF(iemOp_vmulss_Vss_Hss_Wss)
3614{
3615 IEMOP_MNEMONIC3(VEX_RVM, VMULSS, vmulss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3616 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3617 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback));
3618}
3619
3620
3621/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
3622FNIEMOP_DEF(iemOp_vmulsd_Vsd_Hsd_Wsd)
3623{
3624 IEMOP_MNEMONIC3(VEX_RVM, VMULSD, vmulsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3625 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3626 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback));
3627}
3628
3629
3630/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
3631FNIEMOP_DEF(iemOp_vcvtps2pd_Vpd_Wps)
3632{
3633 IEMOP_MNEMONIC2(VEX_RM, VCVTPS2PD, vcvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
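    /* Widening conversion: the source is half the destination width, so the L=0 form reads only 64 bits (two singles) while the L=1 form reads 128 bits (four singles). */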
3634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3635 if (IEM_IS_MODRM_REG_MODE(bRm))
3636 {
3637 /*
3638 * Register, register.
3639 */
3640 if (pVCpu->iem.s.uVexLength)
3641 {
3642 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3643 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3644 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3645 IEM_MC_PREPARE_AVX_USAGE();
3646
3647 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
3648 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3649 IEM_MC_LOCAL( X86YMMREG, uDst);
3650 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
3651 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3652 iemAImpl_vcvtps2pd_u256_u128,
3653 iemAImpl_vcvtps2pd_u256_u128_fallback),
3654 puDst, puSrc);
3655 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3656 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3657 IEM_MC_ADVANCE_RIP_AND_FINISH();
3658 IEM_MC_END();
3659 }
3660 else
3661 {
3662 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3663 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3664 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3665 IEM_MC_PREPARE_AVX_USAGE();
3666
3667 IEM_MC_ARG( const uint64_t *, pu64Src, 1);
3668 IEM_MC_REF_XREG_U64_CONST( pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3669 IEM_MC_LOCAL( X86XMMREG, uDst);
3670 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3671 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3672 iemAImpl_vcvtps2pd_u128_u64,
3673 iemAImpl_vcvtps2pd_u128_u64_fallback),
3674 puDst, pu64Src);
3675 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3676 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3677 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3678 IEM_MC_ADVANCE_RIP_AND_FINISH();
3679 IEM_MC_END();
3680 }
3681 }
3682 else
3683 {
3684 /*
3685 * Register, memory.
3686 */
3687 if (pVCpu->iem.s.uVexLength)
3688 {
3689 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3692 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3694 IEM_MC_PREPARE_AVX_USAGE();
3695
3696 IEM_MC_LOCAL(X86XMMREG, uSrc);
3697 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
3698 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3699 IEM_MC_LOCAL(X86YMMREG, uDst);
3700 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
3701 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3702 iemAImpl_vcvtps2pd_u256_u128,
3703 iemAImpl_vcvtps2pd_u256_u128_fallback),
3704 puDst, puSrc);
3705 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3706 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3707 IEM_MC_ADVANCE_RIP_AND_FINISH();
3708 IEM_MC_END();
3709 }
3710 else
3711 {
3712 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3715 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3716 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3717 IEM_MC_PREPARE_AVX_USAGE();
3718
3719 IEM_MC_LOCAL( uint64_t, u64Src);
3720 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
3721 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3722 IEM_MC_LOCAL( X86XMMREG, uDst);
3723 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3724 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3725 iemAImpl_vcvtps2pd_u128_u64,
3726 iemAImpl_vcvtps2pd_u128_u64_fallback),
3727 puDst, pu64Src);
3728 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3729 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3730 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3731 IEM_MC_ADVANCE_RIP_AND_FINISH();
3732 IEM_MC_END();
3733 }
3734 }
3735}
3736
3737
3738/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
3739FNIEMOP_DEF(iemOp_vcvtpd2ps_Vps_Wpd)
3740{
3741 IEMOP_MNEMONIC2(VEX_RM, VCVTPD2PS, vcvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
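    /* Narrowing conversion: the destination is always 128-bit; the L=1 form reads a full 256-bit source and the upper destination lanes are zeroed. */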
3742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3743 if (IEM_IS_MODRM_REG_MODE(bRm))
3744 {
3745 /*
3746 * Register, register.
3747 */
3748 if (pVCpu->iem.s.uVexLength)
3749 {
3750 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3751 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3752 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3753 IEM_MC_PREPARE_AVX_USAGE();
3754
3755 IEM_MC_LOCAL( X86YMMREG, uSrc);
3756 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3757 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
3758 IEM_MC_LOCAL( X86XMMREG, uDst);
3759 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3760 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3761 iemAImpl_vcvtpd2ps_u128_u256,
3762 iemAImpl_vcvtpd2ps_u128_u256_fallback),
3763 puDst, puSrc);
3764 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3765 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3766 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
3767 IEM_MC_ADVANCE_RIP_AND_FINISH();
3768 IEM_MC_END();
3769 }
3770 else
3771 {
3772 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3773 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3774 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3775 IEM_MC_PREPARE_AVX_USAGE();
3776
3777 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
3778 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3779 IEM_MC_LOCAL( X86XMMREG, uDst);
3780 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3781 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3782 iemAImpl_vcvtpd2ps_u128_u128,
3783 iemAImpl_vcvtpd2ps_u128_u128_fallback),
3784 puDst, puSrc);
3785 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3786 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3787 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
3788 IEM_MC_ADVANCE_RIP_AND_FINISH();
3789 IEM_MC_END();
3790 }
3791 }
3792 else
3793 {
3794 /*
3795 * Register, memory.
3796 */
3797 if (pVCpu->iem.s.uVexLength)
3798 {
3799 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3802 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3803 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3804 IEM_MC_PREPARE_AVX_USAGE();
3805
3806 IEM_MC_LOCAL( X86YMMREG, uSrc);
3807 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
3808 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3809 IEM_MC_LOCAL( X86XMMREG, uDst);
3810 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3811 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3812 iemAImpl_vcvtpd2ps_u128_u256,
3813 iemAImpl_vcvtpd2ps_u128_u256_fallback),
3814 puDst, puSrc);
3815 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3816 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3817 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
3818 IEM_MC_ADVANCE_RIP_AND_FINISH();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3826 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3827 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3828 IEM_MC_PREPARE_AVX_USAGE();
3829
3830 IEM_MC_LOCAL(X86XMMREG, uSrc);
3831 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
3832 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3833 IEM_MC_LOCAL( X86XMMREG, uDst);
3834 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3835 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3836 iemAImpl_vcvtpd2ps_u128_u128,
3837 iemAImpl_vcvtpd2ps_u128_u128_fallback),
3838 puDst, puSrc);
3839 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3840 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3841 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3842 IEM_MC_ADVANCE_RIP_AND_FINISH();
3843 IEM_MC_END();
3844 }
3845 }
3846}
3847
3848
3849/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
3850FNIEMOP_DEF(iemOp_vcvtss2sd_Vsd_Hx_Wss)
3851{
3852 IEMOP_MNEMONIC3(VEX_RVM, VCVTSS2SD, vcvtss2sd, Vsd, Hx, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3853 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3854 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtss2sd_u128_r32, iemAImpl_vcvtss2sd_u128_r32_fallback));
3855}
3856
3857
3858/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
3859FNIEMOP_DEF(iemOp_vcvtsd2ss_Vss_Hx_Wsd)
3860{
3861 IEMOP_MNEMONIC3(VEX_RVM, VCVTSD2SS, vcvtsd2ss, Vss, Hx, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3862 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3863 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsd2ss_u128_r64, iemAImpl_vcvtsd2ss_u128_r64_fallback));
3864}
3865
3866
3867/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
3868FNIEMOP_DEF(iemOp_vcvtdq2ps_Vps_Wdq)
3869{
3870 IEMOP_MNEMONIC2(VEX_RM, VCVTDQ2PS, vcvtdq2ps, Vps, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3871 IEMOPMEDIAF2_INIT_VARS( vcvtdq2ps);
3872 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3873}
3874
3875
3876/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
3877FNIEMOP_DEF(iemOp_vcvtps2dq_Vdq_Wps)
3878{
3879 IEMOP_MNEMONIC2(VEX_RM, VCVTPS2DQ, vcvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3880 IEMOPMEDIAF2_INIT_VARS( vcvtps2dq);
3881 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3882}
3883
3884
3885/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
3886FNIEMOP_DEF(iemOp_vcvttps2dq_Vdq_Wps)
3887{
3888 IEMOP_MNEMONIC2(VEX_RM, VCVTTPS2DQ, vcvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
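    /* The extra 't' stands for truncate: the conversion always rounds toward zero instead of honouring MXCSR.RC. */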
3889 IEMOPMEDIAF2_INIT_VARS( vcvttps2dq);
3890 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3891}
3892
3893
3894/* Opcode VEX.F2.0F 0x5b - invalid */
3895
3896
3897/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
3898FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
3899{
3900 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3901 IEMOPMEDIAF3_INIT_VARS( vsubps);
3902 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3903}
3904
3905
3906/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
3907FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
3908{
3909 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3910 IEMOPMEDIAF3_INIT_VARS( vsubpd);
3911 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3912}
3913
3914
3915/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
3916FNIEMOP_DEF(iemOp_vsubss_Vss_Hss_Wss)
3917{
3918 IEMOP_MNEMONIC3(VEX_RVM, VSUBSS, vsubss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3919 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3920 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback));
3921}
3922
3923
3924/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
3925FNIEMOP_DEF(iemOp_vsubsd_Vsd_Hsd_Wsd)
3926{
3927 IEMOP_MNEMONIC3(VEX_RVM, VSUBSD, vsubsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3928 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3929 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback));
3930}
3931
3932
3933/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
3934FNIEMOP_DEF(iemOp_vminps_Vps_Hps_Wps)
3935{
3936 IEMOP_MNEMONIC3(VEX_RVM, VMINPS, vminps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3937 IEMOPMEDIAF3_INIT_VARS( vminps);
3938 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3939}
3940
3941
3942/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
3943FNIEMOP_DEF(iemOp_vminpd_Vpd_Hpd_Wpd)
3944{
3945 IEMOP_MNEMONIC3(VEX_RVM, VMINPD, vminpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3946 IEMOPMEDIAF3_INIT_VARS( vminpd);
3947 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3948}
3949
3950
3951/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
3952FNIEMOP_DEF(iemOp_vminss_Vss_Hss_Wss)
3953{
3954 IEMOP_MNEMONIC3(VEX_RVM, VMINSS, vminss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3955 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3956 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback));
3957}
3958
3959
3960/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
3961FNIEMOP_DEF(iemOp_vminsd_Vsd_Hsd_Wsd)
3962{
3963 IEMOP_MNEMONIC3(VEX_RVM, VMINSD, vminsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3964 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3965 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback));
3966}
3967
3968
3969/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
3970FNIEMOP_DEF(iemOp_vdivps_Vps_Hps_Wps)
3971{
3972 IEMOP_MNEMONIC3(VEX_RVM, VDIVPS, vdivps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3973 IEMOPMEDIAF3_INIT_VARS( vdivps);
3974 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3975}
3976
3977
3978/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
3979FNIEMOP_DEF(iemOp_vdivpd_Vpd_Hpd_Wpd)
3980{
3981 IEMOP_MNEMONIC3(VEX_RVM, VDIVPD, vdivpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3982 IEMOPMEDIAF3_INIT_VARS( vdivpd);
3983 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3984}
3985
3986
3987/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
3988FNIEMOP_DEF(iemOp_vdivss_Vss_Hss_Wss)
3989{
3990 IEMOP_MNEMONIC3(VEX_RVM, VDIVSS, vdivss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3991 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3992 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback));
3993}
3994
3995
3996/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
3997FNIEMOP_DEF(iemOp_vdivsd_Vsd_Hsd_Wsd)
3998{
3999 IEMOP_MNEMONIC3(VEX_RVM, VDIVSD, vdivsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4000 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
4001 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback));
4002}
4003
4004
4005/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
4006FNIEMOP_DEF(iemOp_vmaxps_Vps_Hps_Wps)
4007{
4008 IEMOP_MNEMONIC3(VEX_RVM, VMAXPS, vmaxps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4009 IEMOPMEDIAF3_INIT_VARS( vmaxps);
4010 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4011}
4012
4013
4014/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
4015FNIEMOP_DEF(iemOp_vmaxpd_Vpd_Hpd_Wpd)
4016{
4017 IEMOP_MNEMONIC3(VEX_RVM, VMAXPD, vmaxpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4018 IEMOPMEDIAF3_INIT_VARS( vmaxpd);
4019 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4020}
4021
4022
4023/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
4024FNIEMOP_DEF(iemOp_vmaxss_Vss_Hss_Wss)
4025{
4026 IEMOP_MNEMONIC3(VEX_RVM, VMAXSS, vmaxss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4027 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
4028 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback));
4029}
4030
4031
4032/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
4033FNIEMOP_DEF(iemOp_vmaxsd_Vsd_Hsd_Wsd)
4034{
4035 IEMOP_MNEMONIC3(VEX_RVM, VMAXSD, vmaxsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4036 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
4037 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback));
4038}
4039
4040
4041/* Opcode VEX.0F 0x60 - invalid */
4042
4043
4044/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
4045FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
4046{
4047 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4048 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
4049 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4050}
4051
4052
4053/* Opcode VEX.F3.0F 0x60 - invalid */
4054
4055
4056/* Opcode VEX.0F 0x61 - invalid */
4057
4058
4059/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
4060FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
4061{
4062 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4063 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
4064 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4065}
4066
4067
4068/* Opcode VEX.F3.0F 0x61 - invalid */
4069
4070
4071/* Opcode VEX.0F 0x62 - invalid */
4072
4073/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
4074FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
4075{
4076 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4077 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
4078 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4079}
4080
4081
4082/* Opcode VEX.F3.0F 0x62 - invalid */
4083
4084
4085
4086/* Opcode VEX.0F 0x63 - invalid */
4087
4088
4089/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
4090FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
4091{
4092 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4093 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
4094 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4095}
4096
4097
4098/* Opcode VEX.F3.0F 0x63 - invalid */
4099
4100/* Opcode VEX.0F 0x64 - invalid */
4101
4102
4103/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
4104FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
4105{
4106 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4107 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
4108 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4109}
4110
4111
4112/* Opcode VEX.F3.0F 0x64 - invalid */
4113
4114/* Opcode VEX.0F 0x65 - invalid */
4115
4116
4117/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
4118FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
4119{
4120 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4121 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
4122 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4123}
4124
4125
4126/* Opcode VEX.F3.0F 0x65 - invalid */
4127
4128/* Opcode VEX.0F 0x66 - invalid */
4129
4130
4131/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
4132FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
4133{
4134 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4135 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
4136 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4137}
4138
4139
4140/* Opcode VEX.F3.0F 0x66 - invalid */
4141
4142/* Opcode VEX.0F 0x67 - invalid */
4143
4144
4145/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
4146FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
4147{
4148 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4149 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
4150 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4151}
4152
4153
4154/* Opcode VEX.F3.0F 0x67 - invalid */
4155
4156
4157///**
4158// * Common worker for SSE2 instructions on the form:
4159// * pxxxx xmm1, xmm2/mem128
4160// *
4161// * The 2nd operand is the second half of a register, which in the memory case
4162// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
4163// * where it may read the full 128 bits or only the upper 64 bits.
4164// *
4165// * Exceptions type 4.
4166// */
4167//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
4168//{
4169// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4170// if (IEM_IS_MODRM_REG_MODE(bRm))
4171// {
4172// /*
4173// * Register, register.
4174// */
4175// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4176// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4177// IEM_MC_ARG(PRTUINT128U, pDst, 0);
4178// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4179// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4180// IEM_MC_PREPARE_SSE_USAGE();
4181// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4182// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4183// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4184// IEM_MC_ADVANCE_RIP_AND_FINISH();
4185// IEM_MC_END();
4186// }
4187// else
4188// {
4189// /*
4190// * Register, memory.
4191// */
4192// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4193// IEM_MC_ARG(PRTUINT128U, pDst, 0);
4194// IEM_MC_LOCAL(RTUINT128U, uSrc);
4195// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4196// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4197//
4198// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4199// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4200// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4201// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
4202//
4203// IEM_MC_PREPARE_SSE_USAGE();
4204// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4205// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4206//
4207// IEM_MC_ADVANCE_RIP_AND_FINISH();
4208// IEM_MC_END();
4209// }
4210// return VINF_SUCCESS;
4211//}
4212
4213
4214/* Opcode VEX.0F 0x68 - invalid */
4215
4216/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
4217FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
4218{
4219 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4220 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
4221 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4222}
4223
4224
4225/* Opcode VEX.F3.0F 0x68 - invalid */
4226
4227
4228/* Opcode VEX.0F 0x69 - invalid */
4229
4230
4231/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
4232FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
4233{
4234 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4235 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
4236 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4237}
4238
4239
4240/* Opcode VEX.F3.0F 0x69 - invalid */
4241
4242
4243/* Opcode VEX.0F 0x6a - invalid */
4244
4245
4246/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
4247FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
4248{
4249 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4250 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
4251 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4252}
4253
4254
4255/* Opcode VEX.F3.0F 0x6a - invalid */
4256
4257
4258/* Opcode VEX.0F 0x6b - invalid */
4259
4260
4261/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
4262FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
4263{
4264 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4265 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
4266 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4267}
4268
4269
4270/* Opcode VEX.F3.0F 0x6b - invalid */
4271
4272
4273/* Opcode VEX.0F 0x6c - invalid */
4274
4275
4276/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
4277FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
4278{
4279 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4280 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
4281 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4282}
4283
4284
4285/* Opcode VEX.F3.0F 0x6c - invalid */
4286/* Opcode VEX.F2.0F 0x6c - invalid */
4287
4288
4289/* Opcode VEX.0F 0x6d - invalid */
4290
4291
4292/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
4293FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
4294{
4295 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4296 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
4297 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4298}
4299
4300
4301/* Opcode VEX.F3.0F 0x6d - invalid */
4302
4303
4304/* Opcode VEX.0F 0x6e - invalid */
4305
4306FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
4307{
4308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
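    /* VEX.W (tracked via IEM_OP_PRF_SIZE_REX_W) selects vmovq (W=1, 64-bit) vs vmovd (W=0, 32-bit); both zero the remainder of the destination YMM register. */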
4309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4310 {
4311 /**
4312 * @opcode 0x6e
4313 * @opcodesub rex.w=1
4314 * @oppfx 0x66
4315 * @opcpuid avx
4316 * @opgroup og_avx_simdint_datamov
4317 * @opxcpttype 5
4318 * @optest 64-bit / op1=1 op2=2 -> op1=2
4319 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4320 */
4321 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4322 if (IEM_IS_MODRM_REG_MODE(bRm))
4323 {
4324 /* XMM, greg64 */
4325 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4326 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4327 IEM_MC_LOCAL(uint64_t, u64Tmp);
4328
4329 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4330 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4331
4332 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4333 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4334
4335 IEM_MC_ADVANCE_RIP_AND_FINISH();
4336 IEM_MC_END();
4337 }
4338 else
4339 {
4340 /* XMM, [mem64] */
4341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4343 IEM_MC_LOCAL(uint64_t, u64Tmp);
4344
4345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4346 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4349
4350 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4351 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4352
4353 IEM_MC_ADVANCE_RIP_AND_FINISH();
4354 IEM_MC_END();
4355 }
4356 }
4357 else
4358 {
4359 /**
4360 * @opdone
4361 * @opcode 0x6e
4362 * @opcodesub rex.w=0
4363 * @oppfx 0x66
4364 * @opcpuid avx
4365 * @opgroup og_avx_simdint_datamov
4366 * @opxcpttype 5
4367 * @opfunction iemOp_vmovd_q_Vy_Ey
4368 * @optest op1=1 op2=2 -> op1=2
4369 * @optest op1=0 op2=-42 -> op1=-42
4370 */
4371 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4372 if (IEM_IS_MODRM_REG_MODE(bRm))
4373 {
4374 /* XMM, greg32 */
4375 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4376 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4377 IEM_MC_LOCAL(uint32_t, u32Tmp);
4378
4379 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4380 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4381
4382 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4383 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4384
4385 IEM_MC_ADVANCE_RIP_AND_FINISH();
4386 IEM_MC_END();
4387 }
4388 else
4389 {
4390 /* XMM, [mem32] */
4391 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4393 IEM_MC_LOCAL(uint32_t, u32Tmp);
4394
4395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4396 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4397 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4398 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4399
4400 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4401 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4402
4403 IEM_MC_ADVANCE_RIP_AND_FINISH();
4404 IEM_MC_END();
4405 }
4406 }
4407}
4408
4409
4410/* Opcode VEX.F3.0F 0x6e - invalid */
4411
4412
4413/* Opcode VEX.0F 0x6f - invalid */
4414
4415/**
4416 * @opcode 0x6f
4417 * @oppfx 0x66
4418 * @opcpuid avx
4419 * @opgroup og_avx_simdint_datamove
4420 * @opxcpttype 1
4421 * @optest op1=1 op2=2 -> op1=2
4422 * @optest op1=0 op2=-42 -> op1=-42
4423 */
4424FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
4425{
4426 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4427 Assert(pVCpu->iem.s.uVexLength <= 1);
4428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4429 if (IEM_IS_MODRM_REG_MODE(bRm))
4430 {
4431 /*
4432 * Register, register.
4433 */
4434 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4435 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4436
4437 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4438 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4439 if (pVCpu->iem.s.uVexLength == 0)
4440 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4441 IEM_GET_MODRM_RM(pVCpu, bRm));
4442 else
4443 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4444 IEM_GET_MODRM_RM(pVCpu, bRm));
4445 IEM_MC_ADVANCE_RIP_AND_FINISH();
4446 IEM_MC_END();
4447 }
4448 else if (pVCpu->iem.s.uVexLength == 0)
4449 {
4450 /*
4451 * Register, memory128.
4452 */
4453 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4454 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4456
4457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4458 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4459 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4460 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4461
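        /* vmovdqa requires an aligned operand: the _ALIGN_SSE fetch raises #GP(0) on a misaligned address. */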
4462 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4463 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4464
4465 IEM_MC_ADVANCE_RIP_AND_FINISH();
4466 IEM_MC_END();
4467 }
4468 else
4469 {
4470 /*
4471 * Register, memory256.
4472 */
4473 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4474 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4476
4477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4478 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4479 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4480 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4481
4482 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4483 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4484
4485 IEM_MC_ADVANCE_RIP_AND_FINISH();
4486 IEM_MC_END();
4487 }
4488}
4489
4490/**
4491 * @opcode 0x6f
4492 * @oppfx 0xf3
4493 * @opcpuid avx
4494 * @opgroup og_avx_simdint_datamove
4495 * @opxcpttype 4UA
4496 * @optest op1=1 op2=2 -> op1=2
4497 * @optest op1=0 op2=-42 -> op1=-42
4498 */
4499FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
4500{
4501 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4502 Assert(pVCpu->iem.s.uVexLength <= 1);
4503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4504 if (IEM_IS_MODRM_REG_MODE(bRm))
4505 {
4506 /*
4507 * Register, register.
4508 */
4509 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4510 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4511
4512 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4513 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4514 if (pVCpu->iem.s.uVexLength == 0)
4515 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4516 IEM_GET_MODRM_RM(pVCpu, bRm));
4517 else
4518 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4519 IEM_GET_MODRM_RM(pVCpu, bRm));
4520 IEM_MC_ADVANCE_RIP_AND_FINISH();
4521 IEM_MC_END();
4522 }
4523 else if (pVCpu->iem.s.uVexLength == 0)
4524 {
4525 /*
4526 * Register, memory128.
4527 */
4528 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4529 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4531
4532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4533 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4534 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4535 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4536
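        /* vmovdqu tolerates unaligned operands, hence the _NO_AC (no alignment check) fetch. */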
4537 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4538 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4539
4540 IEM_MC_ADVANCE_RIP_AND_FINISH();
4541 IEM_MC_END();
4542 }
4543 else
4544 {
4545 /*
4546 * Register, memory256.
4547 */
4548 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4549 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4551
4552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4553 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4554 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4555 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4556
4557 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4558 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4559
4560 IEM_MC_ADVANCE_RIP_AND_FINISH();
4561 IEM_MC_END();
4562 }
4563}
4564
4565
4566/* Opcode VEX.0F 0x70 - invalid */
4567
4568
4569/**
4570 * Common worker for AVX/AVX2 instructions on the forms:
4571 * - vpxxx xmm0, xmm2/mem128, imm8
4572 * - vpxxx ymm0, ymm2/mem256, imm8
4573 *
4574 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4575 */
4576FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4577{
4578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4579 if (IEM_IS_MODRM_REG_MODE(bRm))
4580 {
4581 /*
4582 * Register, register.
4583 */
4584 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4585 if (pVCpu->iem.s.uVexLength)
4586 {
4587 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4588 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4589 IEM_MC_LOCAL(RTUINT256U, uDst);
4590 IEM_MC_LOCAL(RTUINT256U, uSrc);
4591 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4592 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4593 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4594 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4595 IEM_MC_PREPARE_AVX_USAGE();
4596 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4597 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4598 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4599 IEM_MC_ADVANCE_RIP_AND_FINISH();
4600 IEM_MC_END();
4601 }
4602 else
4603 {
4604 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4605 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4606 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4607 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4608 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4609 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4610 IEM_MC_PREPARE_AVX_USAGE();
4611 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4612 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4613 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4614 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4615 IEM_MC_ADVANCE_RIP_AND_FINISH();
4616 IEM_MC_END();
4617 }
4618 }
4619 else
4620 {
4621 /*
4622 * Register, memory.
4623 */
4624 if (pVCpu->iem.s.uVexLength)
4625 {
4626 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4627 IEM_MC_LOCAL(RTUINT256U, uDst);
4628 IEM_MC_LOCAL(RTUINT256U, uSrc);
4629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4630 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4631 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4632
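            /* The effective address must be calculated before the imm8 is fetched; the trailing '1' is the immediate size, so RIP-relative operands are resolved against the correct instruction length. */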
4633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4634 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4635 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4636 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4637 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4638 IEM_MC_PREPARE_AVX_USAGE();
4639
4640 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4641 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4642 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4643
4644 IEM_MC_ADVANCE_RIP_AND_FINISH();
4645 IEM_MC_END();
4646 }
4647 else
4648 {
4649 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4650 IEM_MC_LOCAL(RTUINT128U, uSrc);
4651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4652 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4653 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4654
4655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4656 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4657 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4658 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4659 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4660 IEM_MC_PREPARE_AVX_USAGE();
4661
4662 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4663 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4664 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4665 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4666
4667 IEM_MC_ADVANCE_RIP_AND_FINISH();
4668 IEM_MC_END();
4669 }
4670 }
4671}
4672
4673
4674/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
4675FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
4676{
4677 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
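    /* The 128-bit form reuses the SSE2 pshufd worker directly; only the 256-bit form needs a host/fallback pair. */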
4678 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
4679 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
4680
4681}
4682
4683
4684/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
4685FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
4686{
4687 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4688 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
4689 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
4690
4691}
4692
4693
4694/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
4695FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
4696{
4697 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4698 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
4699 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
4700}
4701
4702
4703/**
4704 * Common worker(s) for AVX/AVX2 instructions on the forms:
4705 * - vpxxx xmm0, xmm2, imm8
4706 * - vpxxx ymm0, ymm2, imm8
4707 *
4708 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
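 * The ModRM.rm field encodes the source register (mod must be 3, there are no memory forms) and VEX.vvvv the destination.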
4709 */
4710FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
4711{
4712 if (IEM_IS_MODRM_REG_MODE(bRm))
4713 {
4714 /*
4715 * Register, register.
4716 */
4717 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4718 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4719 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
4720 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4721 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4722 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4723 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4724 IEM_MC_PREPARE_AVX_USAGE();
4725 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4726 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4727 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4728 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
4729 IEM_MC_ADVANCE_RIP_AND_FINISH();
4730 IEM_MC_END();
4731 }
4732 /* No memory operand. */
4733 else
4734 IEMOP_RAISE_INVALID_OPCODE_RET();
4735}
4736
4737FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4738{
4739 if (IEM_IS_MODRM_REG_MODE(bRm))
4740 {
4741 /*
4742 * Register, register.
4743 */
4744 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4745 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4746 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
4747 IEM_MC_LOCAL(RTUINT256U, uDst);
4748 IEM_MC_LOCAL(RTUINT256U, uSrc);
4749 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4750 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4751 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4752 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4753 IEM_MC_PREPARE_AVX_USAGE();
4754 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4755 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4756 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
4757 IEM_MC_ADVANCE_RIP_AND_FINISH();
4758 IEM_MC_END();
4759 }
4760 /* No memory operand. */
4761 else
4762 IEMOP_RAISE_INVALID_OPCODE_RET();
4763}
4764
4765
4766/* Opcode VEX.0F 0x71 11/2 - invalid. */
4767/** Opcode VEX.66.0F 0x71 11/2. */
4768FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
4769{
4770 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4771 if (pVCpu->iem.s.uVexLength)
4772 {
4773 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4774 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
4775 }
4776 else
4777 {
4778 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4779 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
4780 }
4781}
4782
4783
4784/* Opcode VEX.0F 0x71 11/4 - invalid */
4785/** Opcode VEX.66.0F 0x71 11/4. */
4786FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
4787{
4788 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4789 if (pVCpu->iem.s.uVexLength)
4790 {
4791 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4792 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
4793 }
4794 else
4795 {
4796 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4797 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
4798 }
4799}
4800
4801/* Opcode VEX.0F 0x71 11/6 - invalid */
4802
4803/** Opcode VEX.66.0F 0x71 11/6. */
4804FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
4805{
4806 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4807 if (pVCpu->iem.s.uVexLength)
4808 {
4809 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4810 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
4811 }
4812 else
4813 {
4814 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4815 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
4816 }
4817}
4818
4819
4820/**
4821 * VEX Group 12 jump table for register variant.
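 * Indexed by ModRM.reg (/0../7) times four plus the mandatory prefix index (none, 0x66, 0xF3, 0xF2); e.g. VEX.66.0F 0x71 /2 ib lands on row 2, column 1 (vpsrlw).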
4822 */
4823IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
4824{
4825 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4826 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4827 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4828 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4829 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4830 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4831 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4832 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4833};
4834AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
4835
4836
4837/** Opcode VEX.0F 0x71. */
4838FNIEMOP_DEF(iemOp_VGrp12)
4839{
4840 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4841 if (IEM_IS_MODRM_REG_MODE(bRm))
4842 /* register, register */
4843 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4844 + pVCpu->iem.s.idxPrefix], bRm);
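    /* All memory forms of this group are invalid; the imm8-aware invalid handler is used so the trailing immediate byte is still consumed and decoding stays in sync. */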
4845 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4846}
4847
4848
4849/* Opcode VEX.0F 0x72 11/2 - invalid. */
4850/** Opcode VEX.66.0F 0x72 11/2. */
4851FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
4852{
4853 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4854 if (pVCpu->iem.s.uVexLength)
4855 {
4856 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4857 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
4858 }
4859 else
4860 {
4861 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4862 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
4863 }
4864}
4865
4866
4867/* Opcode VEX.0F 0x72 11/4 - invalid. */
4868/** Opcode VEX.66.0F 0x72 11/4. */
4869FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
4870{
4871 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4872 if (pVCpu->iem.s.uVexLength)
4873 {
4874 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4875 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
4876 }
4877 else
4878 {
4879 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4880 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
4881 }
4882}
4883
4884/* Opcode VEX.0F 0x72 11/6 - invalid. */
4885
4886/** Opcode VEX.66.0F 0x72 11/6. */
4887FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
4888{
4889 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4890 if (pVCpu->iem.s.uVexLength)
4891 {
4892 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4893 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
4894 }
4895 else
4896 {
4897 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4898 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
4899 }
4900}
4901
4902
4903/**
4904 * VEX Group 13 jump table for register variant.
4905 */
4906IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
4907{
4908 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4909 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4910 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4911 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4912 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4913 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4914 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4915 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4916};
4917AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
4918
4919/** Opcode VEX.0F 0x72. */
4920FNIEMOP_DEF(iemOp_VGrp13)
4921{
4922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4923 if (IEM_IS_MODRM_REG_MODE(bRm))
4924 /* register, register */
4925 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4926 + pVCpu->iem.s.idxPrefix], bRm);
4927 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4928}
4929
4930
4931/* Opcode VEX.0F 0x73 11/2 - invalid. */
4932/** Opcode VEX.66.0F 0x73 11/2. */
4933FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
4934{
4935 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4936 if (pVCpu->iem.s.uVexLength)
4937 {
4938 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4939 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
4940 }
4941 else
4942 {
4943 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4944 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
4945 }
4946}
4947
4948
4949/** Opcode VEX.66.0F 0x73 11/3. */
4950FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
4951{
4952 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4953 if (pVCpu->iem.s.uVexLength)
4954 {
4955 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4956 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
4957 }
4958 else
4959 {
4960 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4961 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
4962 }
4963}
4964
4965/* Opcode VEX.0F 0x73 11/6 - invalid. */
4966
4967/** Opcode VEX.66.0F 0x73 11/6. */
4968FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
4969{
4970 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4971 if (pVCpu->iem.s.uVexLength)
4972 {
4973 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4974 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
4975 }
4976 else
4977 {
4978 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4979 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
4980 }
4981}
4982
4983/** Opcode VEX.66.0F 0x73 11/7. */
4984FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
4985{
4986 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4987 if (pVCpu->iem.s.uVexLength)
4988 {
4989 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4990 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
4991 }
4992 else
4993 {
4994 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4995 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
4996 }
4997}
4998
4999/* Opcode VEX.0F 0x73 11/7 - invalid. */
5000
5001/**
5002 * VEX Group 14 jump table for register variant.
5003 */
5004IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
5005{
5006 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5007 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5008 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5009 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5010 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5011 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5012 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5013 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5014};
5015AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
5016
5017
5018/** Opcode VEX.0F 0x73. */
5019FNIEMOP_DEF(iemOp_VGrp14)
5020{
5021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5022 if (IEM_IS_MODRM_REG_MODE(bRm))
5023 /* register, register */
5024 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5025 + pVCpu->iem.s.idxPrefix], bRm);
5026 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5027}
5028
5029
5030/* Opcode VEX.0F 0x74 - invalid */
5031
5032
5033/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
5034FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
5035{
5036 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5037 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
5038 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5039}
5040
5041/* Opcode VEX.F3.0F 0x74 - invalid */
5042/* Opcode VEX.F2.0F 0x74 - invalid */
5043
5044
5045/* Opcode VEX.0F 0x75 - invalid */
5046
5047
5048/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
5049FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
5050{
5051 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5052 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
5053 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5054}
5055
5056
5057/* Opcode VEX.F3.0F 0x75 - invalid */
5058/* Opcode VEX.F2.0F 0x75 - invalid */
5059
5060
5061/* Opcode VEX.0F 0x76 - invalid */
5062
5063
5064/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
5065FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
5066{
5067 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5068 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
5069 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5070}
5071
5072
5073/* Opcode VEX.F3.0F 0x76 - invalid */
5074/* Opcode VEX.F2.0F 0x76 - invalid */
5075
5076
5077/** Opcode VEX.0F 0x77 - vzeroupper / vzeroall */
5078FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
5079{
5080 Assert(pVCpu->iem.s.uVexLength <= 1);
5081 if (pVCpu->iem.s.uVexLength == 0)
5082 {
5083 /*
5084 * 128-bit: vzeroupper
5085 */
5086 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
5087 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5088
5089 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5090 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5091 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5092
5093 IEM_MC_CLEAR_YREG_128_UP(0);
5094 IEM_MC_CLEAR_YREG_128_UP(1);
5095 IEM_MC_CLEAR_YREG_128_UP(2);
5096 IEM_MC_CLEAR_YREG_128_UP(3);
5097 IEM_MC_CLEAR_YREG_128_UP(4);
5098 IEM_MC_CLEAR_YREG_128_UP(5);
5099 IEM_MC_CLEAR_YREG_128_UP(6);
5100 IEM_MC_CLEAR_YREG_128_UP(7);
5101
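        /* YMM8 thru YMM15 exist only in 64-bit mode. */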
5102 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
5103 {
5104 IEM_MC_CLEAR_YREG_128_UP( 8);
5105 IEM_MC_CLEAR_YREG_128_UP( 9);
5106 IEM_MC_CLEAR_YREG_128_UP(10);
5107 IEM_MC_CLEAR_YREG_128_UP(11);
5108 IEM_MC_CLEAR_YREG_128_UP(12);
5109 IEM_MC_CLEAR_YREG_128_UP(13);
5110 IEM_MC_CLEAR_YREG_128_UP(14);
5111 IEM_MC_CLEAR_YREG_128_UP(15);
5112 }
5113
5114 IEM_MC_ADVANCE_RIP_AND_FINISH();
5115 IEM_MC_END();
5116 }
5117 else
5118 {
5119 /*
5120 * 256-bit: vzeroall
5121 */
5122 IEMOP_MNEMONIC(vzeroall, "vzeroall");
5123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5124
5125 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5126 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5127 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5128
5129 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
5130 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
5131 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
5132 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
5133 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
5134 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
5135 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
5136 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
5137 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
5138
5139 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
5140 {
5141 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
5142 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
5143 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
5144 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
5145 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
5146 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
5147 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
5148 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
5149 }
5150
5151 IEM_MC_ADVANCE_RIP_AND_FINISH();
5152 IEM_MC_END();
5153 }
5154}
5155
5156
5157/* Opcode VEX.66.0F 0x77 - invalid */
5158/* Opcode VEX.F3.0F 0x77 - invalid */
5159/* Opcode VEX.F2.0F 0x77 - invalid */
5160
5161/* Opcode VEX.0F 0x78 - invalid */
5162/* Opcode VEX.66.0F 0x78 - invalid */
5163/* Opcode VEX.F3.0F 0x78 - invalid */
5164/* Opcode VEX.F2.0F 0x78 - invalid */
5165
5166/* Opcode VEX.0F 0x79 - invalid */
5167/* Opcode VEX.66.0F 0x79 - invalid */
5168/* Opcode VEX.F3.0F 0x79 - invalid */
5169/* Opcode VEX.F2.0F 0x79 - invalid */
5170
5171/* Opcode VEX.0F 0x7a - invalid */
5172/* Opcode VEX.66.0F 0x7a - invalid */
5173/* Opcode VEX.F3.0F 0x7a - invalid */
5174/* Opcode VEX.F2.0F 0x7a - invalid */
5175
5176/* Opcode VEX.0F 0x7b - invalid */
5177/* Opcode VEX.66.0F 0x7b - invalid */
5178/* Opcode VEX.F3.0F 0x7b - invalid */
5179/* Opcode VEX.F2.0F 0x7b - invalid */
5180
5181/* Opcode VEX.0F 0x7c - invalid */
5182
5183
5184/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
5185FNIEMOP_DEF(iemOp_vhaddpd_Vpd_Hpd_Wpd)
5186{
5187    IEMOP_MNEMONIC3(VEX_RVM, VHADDPD, vhaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5188 IEMOPMEDIAF3_INIT_VARS( vhaddpd);
5189 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5190}
5191
5192
5193/* Opcode VEX.F3.0F 0x7c - invalid */
5194
5195
5196/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
5197FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
5198{
5199 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5200 IEMOPMEDIAF3_INIT_VARS( vhaddps);
5201 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5202}
5203
5204
5205/* Opcode VEX.0F 0x7d - invalid */
5206
5207
5208/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
5209FNIEMOP_DEF(iemOp_vhsubpd_Vpd_Hpd_Wpd)
5210{
5211    IEMOP_MNEMONIC3(VEX_RVM, VHSUBPD, vhsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5212 IEMOPMEDIAF3_INIT_VARS( vhsubpd);
5213 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5214}
5215
5216
5217/* Opcode VEX.F3.0F 0x7d - invalid */
5218
5219
5220/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
5221FNIEMOP_DEF(iemOp_vhsubps_Vps_Hps_Wps)
5222{
5223 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPS, vhsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5224 IEMOPMEDIAF3_INIT_VARS( vhsubps);
5225 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5226}
5227
5228
5229/* Opcode VEX.0F 0x7e - invalid */
5230
5231FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
5232{
5233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5234 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5235 {
5236 /**
5237 * @opcode 0x7e
5238 * @opcodesub rex.w=1
5239 * @oppfx 0x66
5240 * @opcpuid avx
5241 * @opgroup og_avx_simdint_datamov
5242 * @opxcpttype 5
5243 * @optest 64-bit / op1=1 op2=2 -> op1=2
5244 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5245 */
5246 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
5247 if (IEM_IS_MODRM_REG_MODE(bRm))
5248 {
5249 /* greg64, XMM */
5250 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5251 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5252 IEM_MC_LOCAL(uint64_t, u64Tmp);
5253
5254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5256
5257 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5258 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5259
5260 IEM_MC_ADVANCE_RIP_AND_FINISH();
5261 IEM_MC_END();
5262 }
5263 else
5264 {
5265 /* [mem64], XMM */
5266 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5268 IEM_MC_LOCAL(uint64_t, u64Tmp);
5269
5270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5271 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5272 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5273 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5274
5275 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5276 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5277
5278 IEM_MC_ADVANCE_RIP_AND_FINISH();
5279 IEM_MC_END();
5280 }
5281 }
5282 else
5283 {
5284 /**
5285 * @opdone
5286 * @opcode 0x7e
5287 * @opcodesub rex.w=0
5288 * @oppfx 0x66
5289 * @opcpuid avx
5290 * @opgroup og_avx_simdint_datamov
5291 * @opxcpttype 5
5292 * @opfunction iemOp_vmovd_q_Ey_Vy
5293 * @optest op1=1 op2=2 -> op1=2
5294 * @optest op1=0 op2=-42 -> op1=-42
5295 */
5296 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
5297 if (IEM_IS_MODRM_REG_MODE(bRm))
5298 {
5299 /* greg32, XMM */
5300 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5301 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5302 IEM_MC_LOCAL(uint32_t, u32Tmp);
5303
5304 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5305 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5306
5307 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5308 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5309
5310 IEM_MC_ADVANCE_RIP_AND_FINISH();
5311 IEM_MC_END();
5312 }
5313 else
5314 {
5315 /* [mem32], XMM */
5316 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5318 IEM_MC_LOCAL(uint32_t, u32Tmp);
5319
5320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5321 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5323 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5324
5325 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5326 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5327
5328 IEM_MC_ADVANCE_RIP_AND_FINISH();
5329 IEM_MC_END();
5330 }
5331 }
5332}
5333
5334
5335/**
5336 * @opcode 0x7e
5337 * @oppfx 0xf3
5338 * @opcpuid avx
5339 * @opgroup og_avx_pcksclr_datamove
5340 * @opxcpttype none
5341 * @optest op1=1 op2=2 -> op1=2
5342 * @optest op1=0 op2=-42 -> op1=-42
5343 */
5344FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
5345{
5346 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5348 if (IEM_IS_MODRM_REG_MODE(bRm))
5349 {
5350 /*
5351 * Register, register.
5352 */
5353 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5354 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5355
5356 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5357 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5358
5359 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
5360 IEM_GET_MODRM_RM(pVCpu, bRm));
5361 IEM_MC_ADVANCE_RIP_AND_FINISH();
5362 IEM_MC_END();
5363 }
5364 else
5365 {
5366 /*
5367 * Memory, register.
5368 */
5369 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5370 IEM_MC_LOCAL(uint64_t, uSrc);
5371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5372
5373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5374 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5375 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5376 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5377
5378 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5379 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5380
5381 IEM_MC_ADVANCE_RIP_AND_FINISH();
5382 IEM_MC_END();
5383 }
5384}
5385
5386/* Opcode VEX.F2.0F 0x7e - invalid */
5387
5388
5389/* Opcode VEX.0F 0x7f - invalid */
5390
5391/**
5392 * @opcode 0x7f
5393 * @oppfx 0x66
5394 * @opcpuid avx
5395 * @opgroup og_avx_simdint_datamove
5396 * @opxcpttype 1
5397 * @optest op1=1 op2=2 -> op1=2
5398 * @optest op1=0 op2=-42 -> op1=-42
5399 */
5400FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
5401{
5402 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5403 Assert(pVCpu->iem.s.uVexLength <= 1);
5404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5405 if (IEM_IS_MODRM_REG_MODE(bRm))
5406 {
5407 /*
5408 * Register, register.
5409 */
5410 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5411 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5412
5413 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5414 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5415 if (pVCpu->iem.s.uVexLength == 0)
5416 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5417 IEM_GET_MODRM_REG(pVCpu, bRm));
5418 else
5419 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5420 IEM_GET_MODRM_REG(pVCpu, bRm));
5421 IEM_MC_ADVANCE_RIP_AND_FINISH();
5422 IEM_MC_END();
5423 }
5424 else if (pVCpu->iem.s.uVexLength == 0)
5425 {
5426 /*
5427 * Register, memory128.
5428 */
5429 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5430 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5432
5433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5434 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5435 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5436 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5437
5438 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5439 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5440
5441 IEM_MC_ADVANCE_RIP_AND_FINISH();
5442 IEM_MC_END();
5443 }
5444 else
5445 {
5446 /*
5447 * Register, memory256.
5448 */
5449 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5450 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5452
5453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5454 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5455 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5456 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5457
5458 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5459 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5460
5461 IEM_MC_ADVANCE_RIP_AND_FINISH();
5462 IEM_MC_END();
5463 }
5464}
5465
5466
5467/**
5468 * @opcode 0x7f
5469 * @oppfx 0xf3
5470 * @opcpuid avx
5471 * @opgroup og_avx_simdint_datamove
5472 * @opxcpttype 4UA
5473 * @optest op1=1 op2=2 -> op1=2
5474 * @optest op1=0 op2=-42 -> op1=-42
5475 */
5476FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
5477{
5478 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5479 Assert(pVCpu->iem.s.uVexLength <= 1);
5480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5481 if (IEM_IS_MODRM_REG_MODE(bRm))
5482 {
5483 /*
5484 * Register, register.
5485 */
5486 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5487 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5488
5489 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5490 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5491 if (pVCpu->iem.s.uVexLength == 0)
5492 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5493 IEM_GET_MODRM_REG(pVCpu, bRm));
5494 else
5495 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5496 IEM_GET_MODRM_REG(pVCpu, bRm));
5497 IEM_MC_ADVANCE_RIP_AND_FINISH();
5498 IEM_MC_END();
5499 }
5500 else if (pVCpu->iem.s.uVexLength == 0)
5501 {
5502 /*
5503 * Register, memory128.
5504 */
5505 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5506 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5508
5509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5510 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5511 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5512 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5513
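        /* Unaligned store: unlike vmovdqa there is no alignment check here (hence the _NO_AC store). */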
5514 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5515 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5516
5517 IEM_MC_ADVANCE_RIP_AND_FINISH();
5518 IEM_MC_END();
5519 }
5520 else
5521 {
5522 /*
5523 * Register, memory256.
5524 */
5525 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5526 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5528
5529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5530 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5532 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5533
5534 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5535 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5536
5537 IEM_MC_ADVANCE_RIP_AND_FINISH();
5538 IEM_MC_END();
5539 }
5540}
5541
5542/* Opcode VEX.F2.0F 0x7f - invalid */
5543
5544
5545/* Opcode VEX.0F 0x80 - invalid */
5546/* Opcode VEX.0F 0x81 - invalid */
5547/* Opcode VEX.0F 0x82 - invalid */
5548/* Opcode VEX.0F 0x83 - invalid */
5549/* Opcode VEX.0F 0x84 - invalid */
5550/* Opcode VEX.0F 0x85 - invalid */
5551/* Opcode VEX.0F 0x86 - invalid */
5552/* Opcode VEX.0F 0x87 - invalid */
5553/* Opcode VEX.0F 0x88 - invalid */
5554/* Opcode VEX.0F 0x89 - invalid */
5555/* Opcode VEX.0F 0x8a - invalid */
5556/* Opcode VEX.0F 0x8b - invalid */
5557/* Opcode VEX.0F 0x8c - invalid */
5558/* Opcode VEX.0F 0x8d - invalid */
5559/* Opcode VEX.0F 0x8e - invalid */
5560/* Opcode VEX.0F 0x8f - invalid */
5561/* Opcode VEX.0F 0x90 - invalid */
5562/* Opcode VEX.0F 0x91 - invalid */
5563/* Opcode VEX.0F 0x92 - invalid */
5564/* Opcode VEX.0F 0x93 - invalid */
5565/* Opcode VEX.0F 0x94 - invalid */
5566/* Opcode VEX.0F 0x95 - invalid */
5567/* Opcode VEX.0F 0x96 - invalid */
5568/* Opcode VEX.0F 0x97 - invalid */
5569/* Opcode VEX.0F 0x98 - invalid */
5570/* Opcode VEX.0F 0x99 - invalid */
5571/* Opcode VEX.0F 0x9a - invalid */
5572/* Opcode VEX.0F 0x9b - invalid */
5573/* Opcode VEX.0F 0x9c - invalid */
5574/* Opcode VEX.0F 0x9d - invalid */
5575/* Opcode VEX.0F 0x9e - invalid */
5576/* Opcode VEX.0F 0x9f - invalid */
5577/* Opcode VEX.0F 0xa0 - invalid */
5578/* Opcode VEX.0F 0xa1 - invalid */
5579/* Opcode VEX.0F 0xa2 - invalid */
5580/* Opcode VEX.0F 0xa3 - invalid */
5581/* Opcode VEX.0F 0xa4 - invalid */
5582/* Opcode VEX.0F 0xa5 - invalid */
5583/* Opcode VEX.0F 0xa6 - invalid */
5584/* Opcode VEX.0F 0xa7 - invalid */
5585/* Opcode VEX.0F 0xa8 - invalid */
5586/* Opcode VEX.0F 0xa9 - invalid */
5587/* Opcode VEX.0F 0xaa - invalid */
5588/* Opcode VEX.0F 0xab - invalid */
5589/* Opcode VEX.0F 0xac - invalid */
5590/* Opcode VEX.0F 0xad - invalid */
5591
5592
5593/* Opcode VEX.0F 0xae mem/0 - invalid. */
5594/* Opcode VEX.0F 0xae mem/1 - invalid. */
5595
5596/**
5597 * @ opmaps grp15
5598 * @ opcode !11/2
5599 * @ oppfx none
5600 * @ opcpuid sse
5601 * @ opgroup og_sse_mxcsrsm
5602 * @ opxcpttype 5
5603 * @ optest op1=0 -> mxcsr=0
5604 * @ optest op1=0x2083 -> mxcsr=0x2083
5605 * @ optest op1=0xfffffffe -> value.xcpt=0xd
5606 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5607 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5608 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5609 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5610 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5611 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5612 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5613 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5614 */
5615FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
5616//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
5617//{
5618// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5619// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5620// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5621// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5622// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5623// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5624// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5625// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
5626// IEM_MC_END();
5627// return VINF_SUCCESS;
5628//}
5629
5630
5631/**
5632 * @opmaps vexgrp15
5633 * @opcode !11/3
5634 * @oppfx none
5635 * @opcpuid avx
5636 * @opgroup og_avx_mxcsrsm
5637 * @opxcpttype 5
5638 * @optest mxcsr=0 -> op1=0
5639 * @optest mxcsr=0x2083 -> op1=0x2083
5640 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5641 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
5642 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5643 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5644 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
5645 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
5646 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
5647 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
5648 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5649 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
5650 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5651 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
5652 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5653 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
5654 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5655 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
5656 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
5657 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
5658 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
5659 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
5660 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
5661 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
5662 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
5663 * -> value.xcpt=0x6
5664 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
5665 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
5666 * APMv4 rev 3.17 page 509.
5667 * @todo Test this instruction on AMD Ryzen.
5668 */
5669FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
5670{
5671 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5672 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5673 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5675 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5676 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5677 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5678 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
5679 IEM_MC_END();
5680}
5681
5682/* Opcode VEX.0F 0xae mem/4 - invalid. */
5683/* Opcode VEX.0F 0xae mem/5 - invalid. */
5684/* Opcode VEX.0F 0xae mem/6 - invalid. */
5685/* Opcode VEX.0F 0xae mem/7 - invalid. */
5686
5687/* Opcode VEX.0F 0xae 11b/0 - invalid. */
5688/* Opcode VEX.0F 0xae 11b/1 - invalid. */
5689/* Opcode VEX.0F 0xae 11b/2 - invalid. */
5690/* Opcode VEX.0F 0xae 11b/3 - invalid. */
5691/* Opcode VEX.0F 0xae 11b/4 - invalid. */
5692/* Opcode VEX.0F 0xae 11b/5 - invalid. */
5693/* Opcode VEX.0F 0xae 11b/6 - invalid. */
5694/* Opcode VEX.0F 0xae 11b/7 - invalid. */
5695
5696/**
5697 * VEX Group 15 jump table for memory variant.
5698 */
5699IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
5700{ /* pfx: none, 066h, 0f3h, 0f2h */
5701 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5702 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5703 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5704 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5705 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5706 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5707 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5708 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5709};
5710AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
5711
5712
5713/** Opcode VEX.0F 0xae. */
5714FNIEMOP_DEF(iemOp_VGrp15)
5715{
5716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5717 if (IEM_IS_MODRM_REG_MODE(bRm))
5718 /* register, register */
5719 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
5720
5721 /* memory, register */
5722 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5723 + pVCpu->iem.s.idxPrefix], bRm);
5724}
5725
5726
5727/* Opcode VEX.0F 0xaf - invalid. */
5728
5729/* Opcode VEX.0F 0xb0 - invalid. */
5730/* Opcode VEX.0F 0xb1 - invalid. */
5731/* Opcode VEX.0F 0xb2 - invalid. */
5733/* Opcode VEX.0F 0xb3 - invalid. */
5734/* Opcode VEX.0F 0xb4 - invalid. */
5735/* Opcode VEX.0F 0xb5 - invalid. */
5736/* Opcode VEX.0F 0xb6 - invalid. */
5737/* Opcode VEX.0F 0xb7 - invalid. */
5738/* Opcode VEX.0F 0xb8 - invalid. */
5739/* Opcode VEX.0F 0xb9 - invalid. */
5740/* Opcode VEX.0F 0xba - invalid. */
5741/* Opcode VEX.0F 0xbb - invalid. */
5742/* Opcode VEX.0F 0xbc - invalid. */
5743/* Opcode VEX.0F 0xbd - invalid. */
5744/* Opcode VEX.0F 0xbe - invalid. */
5745/* Opcode VEX.0F 0xbf - invalid. */
5746
5747/* Opcode VEX.0F 0xc0 - invalid. */
5748/* Opcode VEX.66.0F 0xc0 - invalid. */
5749/* Opcode VEX.F3.0F 0xc0 - invalid. */
5750/* Opcode VEX.F2.0F 0xc0 - invalid. */
5751
5752/* Opcode VEX.0F 0xc1 - invalid. */
5753/* Opcode VEX.66.0F 0xc1 - invalid. */
5754/* Opcode VEX.F3.0F 0xc1 - invalid. */
5755/* Opcode VEX.F2.0F 0xc1 - invalid. */
5756
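/**
 * Common body for vcmpps/vcmppd: fetches the two packed sources (register or
 * memory form, 128 or 256 bits wide per VEX.L), calls the host or fallback
 * comparison worker with the immediate, and stores the result zeroing the
 * destination register up to VLMAX.
 */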
5757#define IEMOP_VCMPP_BODY(a_Instr) \
5758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5759 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5760 { \
5761 /* \
5762 * Register, Register. \
5763 */ \
5764 if (pVCpu->iem.s.uVexLength) \
5765 { \
5766 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5767 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5768 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5769 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5770 IEM_MC_PREPARE_AVX_USAGE(); \
5771 IEM_MC_LOCAL(X86YMMREG, uDst); \
5772 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5773 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5774 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5775            IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5776 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5777 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5778 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5779 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5780 puDst, puSrc, bImmArg); \
5781 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5782 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5783 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5784 IEM_MC_END(); \
5785 } \
5786 else \
5787 { \
5788 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5789 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5790 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5791 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5792 IEM_MC_PREPARE_AVX_USAGE(); \
5793 IEM_MC_LOCAL(X86XMMREG, uDst); \
5794 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5795 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5796 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5797            IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5798 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5799 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5800 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5801 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5802 puDst, puSrc, bImmArg); \
5803 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5804 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5805 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5806 IEM_MC_END(); \
5807 } \
5808 } \
5809 else \
5810 { \
5811 /* \
5812 * Register, Memory. \
5813 */ \
5814 if (pVCpu->iem.s.uVexLength) \
5815 { \
5816 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5819 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5820 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5822 IEM_MC_PREPARE_AVX_USAGE(); \
5823 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5824 IEM_MC_LOCAL(X86YMMREG, uDst); \
5825 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5826 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5827 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5828            IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5829 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5830 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5831 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5832 puDst, puSrc, bImmArg); \
5833 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5834 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5835 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5836 IEM_MC_END(); \
5837 } \
5838 else \
5839 { \
5840 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5843 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5844 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5845 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5846 IEM_MC_PREPARE_AVX_USAGE(); \
5847 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5848 IEM_MC_LOCAL(X86XMMREG, uDst); \
5849 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5850 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5851 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5852            IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5853 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5854 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5855 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5856 puDst, puSrc, bImmArg); \
5857 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5858 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5859 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5860 IEM_MC_END(); \
5861 } \
5862 } \
5863 (void)0
5864
5865
5866/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
5867FNIEMOP_DEF(iemOp_vcmpps_Vps_Hps_Wps_Ib)
5868{
5869 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPS, vcmpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5870 IEMOP_VCMPP_BODY(vcmpps);
5871}
5872
5873
5874/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
5875FNIEMOP_DEF(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib)
5876{
5877 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPD, vcmppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5878 IEMOP_VCMPP_BODY(vcmppd);
5879}
5880
5881
5882/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
5883FNIEMOP_DEF(iemOp_vcmpss_Vss_Hss_Wss_Ib)
5884{
5885    IEMOP_MNEMONIC4(VEX_RVMI, VCMPSS, vcmpss, Vss, Hss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5886
5887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5888 if (IEM_IS_MODRM_REG_MODE(bRm))
5889 {
5890 /*
5891 * XMM32, XMM32.
5892 */
5893 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5894 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5895 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5896 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5897 IEM_MC_PREPARE_AVX_USAGE();
5898 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5899 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5900        IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
5901 IEM_MC_LOCAL(X86XMMREG, uDst);
5902 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5903 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5904 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5905 puDst, puSrc, bImmArg);
5906 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5907 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5908
5909 IEM_MC_ADVANCE_RIP_AND_FINISH();
5910 IEM_MC_END();
5911 }
5912 else
5913 {
5914 /*
5915 * XMM32, [mem32].
5916 */
5917 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5918
5919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5921 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5922 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5923        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5924 IEM_MC_PREPARE_AVX_USAGE();
5925
5926 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5927 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5928        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
5929                                               0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5930 IEM_MC_LOCAL(X86XMMREG, uDst);
5931 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5932 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5933 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5934 puDst, puSrc, bImmArg);
5935 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5936 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5937
5938 IEM_MC_ADVANCE_RIP_AND_FINISH();
5939 IEM_MC_END();
5940 }
5941}
5942
5943
5944/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
5945FNIEMOP_DEF(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib)
5946{
5947    IEMOP_MNEMONIC4(VEX_RVMI, VCMPSD, vcmpsd, Vsd, Hsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5948
5949 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5950 if (IEM_IS_MODRM_REG_MODE(bRm))
5951 {
5952 /*
5953 * XMM64, XMM64.
5954 */
5955 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5956 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5957 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5958 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5959 IEM_MC_PREPARE_AVX_USAGE();
5960 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5961 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5962        IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
5963 IEM_MC_LOCAL(X86XMMREG, uDst);
5964 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5965 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5966 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5967 puDst, puSrc, bImmArg);
5968 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5969 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5970
5971 IEM_MC_ADVANCE_RIP_AND_FINISH();
5972 IEM_MC_END();
5973 }
5974 else
5975 {
5976 /*
5977 * XMM64, [mem64].
5978 */
5979 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5980
5981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5983 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5984 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5985        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5986 IEM_MC_PREPARE_AVX_USAGE();
5987
5988 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5989 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5990        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
5991                                               0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5992 IEM_MC_LOCAL(X86XMMREG, uDst);
5993 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5994 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5995 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5996 puDst, puSrc, bImmArg);
5997 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5998 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5999
6000 IEM_MC_ADVANCE_RIP_AND_FINISH();
6001 IEM_MC_END();
6002 }
6003}
6004
6005
6006/* Opcode VEX.0F 0xc3 - invalid */
6007/* Opcode VEX.66.0F 0xc3 - invalid */
6008/* Opcode VEX.F3.0F 0xc3 - invalid */
6009/* Opcode VEX.F2.0F 0xc3 - invalid */
6010
6011/* Opcode VEX.0F 0xc4 - invalid */
6012
6013
6014/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6015FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
6016{
6017 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
6018 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6019 if (IEM_IS_MODRM_REG_MODE(bRm))
6020 {
6021 /*
6022 * Register, register.
6023 */
6024 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6025 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6026 IEM_MC_LOCAL(RTUINT128U, uSrc1);
6027 IEM_MC_LOCAL(uint16_t, uValue);
6028
6029 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
6030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6031 IEM_MC_PREPARE_AVX_USAGE();
6032
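        /* Copy the first source (VVVV) to the destination, then overwrite the word selected by the immediate. */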
6033 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
6034 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
6035 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
6036 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
6037 IEM_MC_ADVANCE_RIP_AND_FINISH();
6038 IEM_MC_END();
6039 }
6040 else
6041 {
6042 /*
6043 * Register, memory.
6044 */
6045 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6047 IEM_MC_LOCAL(RTUINT128U, uSrc1);
6048 IEM_MC_LOCAL(uint16_t, uValue);
6049
6050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6051 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6052 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
6053 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6054 IEM_MC_PREPARE_AVX_USAGE();
6055
6056 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
6057 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6058 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
6059 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
6060 IEM_MC_ADVANCE_RIP_AND_FINISH();
6061 IEM_MC_END();
6062 }
6063}
6064
6065
6066/* Opcode VEX.F3.0F 0xc4 - invalid */
6067/* Opcode VEX.F2.0F 0xc4 - invalid */
6068
6069/* Opcode VEX.0F 0xc5 - invalid */
6070
6071
6072/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
6073FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
6074{
6075 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
6076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6077 if (IEM_IS_MODRM_REG_MODE(bRm))
6078 {
6079 /*
6080 * greg32, XMM, imm8.
6081 */
6082 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6083 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6084 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
6085 IEM_MC_LOCAL(uint16_t, uValue);
6086 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6087 IEM_MC_PREPARE_AVX_USAGE();
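        /* Fetch the word selected by the immediate (masked to 0..7) and store it zero-extended in the 32-bit GPR. */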
6088 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
6089 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
6090 IEM_MC_ADVANCE_RIP_AND_FINISH();
6091 IEM_MC_END();
6092 }
6093 /* No memory operand. */
6094 else
6095 IEMOP_RAISE_INVALID_OPCODE_RET();
6096}
6097
6098
6099/* Opcode VEX.F3.0F 0xc5 - invalid */
6100/* Opcode VEX.F2.0F 0xc5 - invalid */
6101
6102
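/**
 * Common body for vshufps/vshufpd: selects elements from the two sources as
 * directed by the immediate control byte, 128 or 256 bits wide per VEX.L.
 */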
6103#define VSHUFP_X(a_Instr) \
6104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
6105 if (IEM_IS_MODRM_REG_MODE(bRm)) \
6106 { \
6107 /* \
6108 * Register, register. \
6109 */ \
6110 if (pVCpu->iem.s.uVexLength) \
6111 { \
6112 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6113 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6114 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
6115 IEM_MC_LOCAL(RTUINT256U, uDst); \
6116 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
6117 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
6118 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
6119 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
6120 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
6121 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6122 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6123 IEM_MC_PREPARE_AVX_USAGE(); \
6124 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6125 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6126 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
6127 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6128 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
6129 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6130 IEM_MC_END(); \
6131 } \
6132 else \
6133 { \
6134 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6135 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6136 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
6137 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
6138 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
6139 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
6140 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6141 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6142 IEM_MC_PREPARE_AVX_USAGE(); \
6143 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6144 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6145 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6146 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
6147 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6148 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
6149 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6150 IEM_MC_END(); \
6151 } \
6152 } \
6153 else \
6154 { \
6155 /* \
6156 * Register, memory. \
6157 */ \
6158 if (pVCpu->iem.s.uVexLength) \
6159 { \
6160 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6161 IEM_MC_LOCAL(RTUINT256U, uDst); \
6162 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
6163 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
6164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
6165 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
6166 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
6167 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
6168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
6169 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6170 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6171 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
6172 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6173 IEM_MC_PREPARE_AVX_USAGE(); \
6174 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
6175 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6176 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
6177 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6178 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
6179 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6180 IEM_MC_END(); \
6181 } \
6182 else \
6183 { \
6184 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6185 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
6186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
6187 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
6188 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
6189 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
6190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
6191 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6192 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6193 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
6194 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6195 IEM_MC_PREPARE_AVX_USAGE(); \
6196 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
6197 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6198 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6199 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
6200 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6201 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
6202 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6203 IEM_MC_END(); \
6204 } \
6205 } \
6206 (void)0
6207
6208/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
6209FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
6210{
6211    IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
6212 VSHUFP_X(vshufps);
6213}
6214
6215
6216/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6217FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
6218{
6219 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
6220 VSHUFP_X(vshufpd);
6221}
6222#undef VSHUFP_X
6223
6224
6225/* Opcode VEX.F3.0F 0xc6 - invalid */
6226/* Opcode VEX.F2.0F 0xc6 - invalid */
6227
6228/* Opcode VEX.0F 0xc7 - invalid */
6229/* Opcode VEX.66.0F 0xc7 - invalid */
6230/* Opcode VEX.F3.0F 0xc7 - invalid */
6231/* Opcode VEX.F2.0F 0xc7 - invalid */
6232
6233/* Opcode VEX.0F 0xc8 - invalid */
6234/* Opcode VEX.0F 0xc9 - invalid */
6235/* Opcode VEX.0F 0xca - invalid */
6236/* Opcode VEX.0F 0xcb - invalid */
6237/* Opcode VEX.0F 0xcc - invalid */
6238/* Opcode VEX.0F 0xcd - invalid */
6239/* Opcode VEX.0F 0xce - invalid */
6240/* Opcode VEX.0F 0xcf - invalid */
6241
6242
6243/* Opcode VEX.0F 0xd0 - invalid */
6244
6245
6246/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
6247FNIEMOP_DEF(iemOp_vaddsubpd_Vpd_Hpd_Wpd)
6248{
6249 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPD, vaddsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6250 IEMOPMEDIAF3_INIT_VARS( vaddsubpd);
6251 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6252}
6253
6254
6255/* Opcode VEX.F3.0F 0xd0 - invalid */
6256
6257
6258/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
6259FNIEMOP_DEF(iemOp_vaddsubps_Vps_Hps_Wps)
6260{
6261 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPS, vaddsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6262 IEMOPMEDIAF3_INIT_VARS( vaddsubps);
6263 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6264}
6265
6266
6267/* Opcode VEX.0F 0xd1 - invalid */
6268
6269
6270/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
6271FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
6272{
6273 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6274 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
6275 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6276}
6277
6278/* Opcode VEX.F3.0F 0xd1 - invalid */
6279/* Opcode VEX.F2.0F 0xd1 - invalid */
6280
6281/* Opcode VEX.0F 0xd2 - invalid */
6282/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
6283FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
6284{
6285 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6286 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
6287 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6288}
6289
6290/* Opcode VEX.F3.0F 0xd2 - invalid */
6291/* Opcode VEX.F2.0F 0xd2 - invalid */
6292
6293/* Opcode VEX.0F 0xd3 - invalid */
6294/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
6295FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
6296{
6297 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6298 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
6299 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6300}
6301
6302/* Opcode VEX.F3.0F 0xd3 - invalid */
6303/* Opcode VEX.F2.0F 0xd3 - invalid */
6304
6305/* Opcode VEX.0F 0xd4 - invalid */
6306
6307
6308/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
6309FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
6310{
6311 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6312 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
6313 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6314}
6315
6316
6317/* Opcode VEX.F3.0F 0xd4 - invalid */
6318/* Opcode VEX.F2.0F 0xd4 - invalid */
6319
6320/* Opcode VEX.0F 0xd5 - invalid */
6321
6322
6323/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
6324FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
6325{
6326 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6327 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
6328 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6329}
6330
6331
6332/* Opcode VEX.F3.0F 0xd5 - invalid */
6333/* Opcode VEX.F2.0F 0xd5 - invalid */
6334
6335/* Opcode VEX.0F 0xd6 - invalid */
6336
6337/**
6338 * @opcode 0xd6
6339 * @oppfx 0x66
6340 * @opcpuid avx
6341 * @opgroup og_avx_pcksclr_datamove
6342 * @opxcpttype none
6343 * @optest op1=-1 op2=2 -> op1=2
6344 * @optest op1=0 op2=-42 -> op1=-42
6345 */
6346FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
6347{
6348 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
6349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6350 if (IEM_IS_MODRM_REG_MODE(bRm))
6351 {
6352 /*
6353 * Register, register.
6354 */
6355 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6356 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6357
6358 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6359 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6360
6361 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
6362 IEM_GET_MODRM_REG(pVCpu, bRm));
6363 IEM_MC_ADVANCE_RIP_AND_FINISH();
6364 IEM_MC_END();
6365 }
6366 else
6367 {
6368 /*
6369 * Memory, register.
6370 */
6371 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6372 IEM_MC_LOCAL(uint64_t, uSrc);
6373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6374
6375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6376 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6377 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6378 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6379
6380 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
6381 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6382
6383 IEM_MC_ADVANCE_RIP_AND_FINISH();
6384 IEM_MC_END();
6385 }
6386}
6387
6388/* Opcode VEX.F3.0F 0xd6 - invalid */
6389/* Opcode VEX.F2.0F 0xd6 - invalid */
6390
6391
6392/* Opcode VEX.0F 0xd7 - invalid */
6393
6394/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
6395FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
6396{
6397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6398 /* Docs says register only. */
6399 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
6400 {
6401    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
6402 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
6403 if (pVCpu->iem.s.uVexLength)
6404 {
6405 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6406 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
6407 IEM_MC_ARG(uint64_t *, puDst, 0);
6408 IEM_MC_LOCAL(RTUINT256U, uSrc);
6409 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
6410 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6411 IEM_MC_PREPARE_AVX_USAGE();
6412 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6413 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6414 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
6415 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
6416 IEM_MC_ADVANCE_RIP_AND_FINISH();
6417 IEM_MC_END();
6418 }
6419 else
6420 {
6421 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6422 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6423 IEM_MC_ARG(uint64_t *, puDst, 0);
6424 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6425 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6426 IEM_MC_PREPARE_AVX_USAGE();
6427 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6428 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6429 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
6430 IEM_MC_ADVANCE_RIP_AND_FINISH();
6431 IEM_MC_END();
6432 }
6433 }
6434 else
6435 IEMOP_RAISE_INVALID_OPCODE_RET();
6436}
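
/*
 * Illustrative only (not part of the build): a scalar sketch of the
 * (v)pmovmskb operation emulated above - bit i of the result is the most
 * significant bit of source byte i, and the remaining bits of the 64-bit
 * destination reference taken above end up zero.  Helper name is hypothetical.
 */
#if 0 /* reference sketch */
static uint64_t pmovmskbSketch(uint8_t const *pabSrc, unsigned cbSrc /* 16 or 32 */)
{
    uint64_t fMask = 0;
    for (unsigned i = 0; i < cbSrc; i++)
        fMask |= (uint64_t)(pabSrc[i] >> 7) << i;   /* MSB of byte i -> bit i */
    return fMask;
}
#endif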
6437
6438
6439/* Opcode VEX.F3.0F 0xd7 - invalid */
6440/* Opcode VEX.F2.0F 0xd7 - invalid */
6441
6442
6443/* Opcode VEX.0F 0xd8 - invalid */
6444
6445/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
6446FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
6447{
6448 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6449 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
6450 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6451}
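
/*
 * Illustrative only: the per-element unsigned saturating subtract that
 * vpsubusb above (and vpsubusw below) applies lane by lane - results clamp
 * at zero instead of wrapping.  Helper name is hypothetical.
 */
#if 0 /* reference sketch */
static uint8_t subUsbSketch(uint8_t uDst, uint8_t uSrc)
{
    return uDst > uSrc ? (uint8_t)(uDst - uSrc) : 0; /* saturate at zero */
}
#endif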
6452
6453
6454/* Opcode VEX.F3.0F 0xd8 - invalid */
6455/* Opcode VEX.F2.0F 0xd8 - invalid */
6456
6457/* Opcode VEX.0F 0xd9 - invalid */
6458
6459
6460/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
6461FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
6462{
6463 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6464 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
6465 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6466}
6467
6468
6469/* Opcode VEX.F3.0F 0xd9 - invalid */
6470/* Opcode VEX.F2.0F 0xd9 - invalid */
6471
6472/* Opcode VEX.0F 0xda - invalid */
6473
6474
6475/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
6476FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
6477{
6478 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6479 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
6480 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6481}
6482
6483
6484/* Opcode VEX.F3.0F 0xda - invalid */
6485/* Opcode VEX.F2.0F 0xda - invalid */
6486
6487/* Opcode VEX.0F 0xdb - invalid */
6488
6489
6490/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
6491FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
6492{
6493 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6494 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6495 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
6496}
6497
6498
6499/* Opcode VEX.F3.0F 0xdb - invalid */
6500/* Opcode VEX.F2.0F 0xdb - invalid */
6501
6502/* Opcode VEX.0F 0xdc - invalid */
6503
6504
6505/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
6506FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
6507{
6508 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6509 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
6510 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6511}
6512
6513
6514/* Opcode VEX.F3.0F 0xdc - invalid */
6515/* Opcode VEX.F2.0F 0xdc - invalid */
6516
6517/* Opcode VEX.0F 0xdd - invalid */
6518
6519
6520/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
6521FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
6522{
6523 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6524 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
6525 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6526}
6527
6528
6529/* Opcode VEX.F3.0F 0xdd - invalid */
6530/* Opcode VEX.F2.0F 0xdd - invalid */
6531
6532/* Opcode VEX.0F 0xde - invalid */
6533
6534
6535/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
6536FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
6537{
6538 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6539 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
6540 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6541}
6542
6543
6544/* Opcode VEX.F3.0F 0xde - invalid */
6545/* Opcode VEX.F2.0F 0xde - invalid */
6546
6547/* Opcode VEX.0F 0xdf - invalid */
6548
6549
6550/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
6551FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
6552{
6553 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6554 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6555 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
6556}
6557
6558
6559/* Opcode VEX.F3.0F 0xdf - invalid */
6560/* Opcode VEX.F2.0F 0xdf - invalid */
6561
6562/* Opcode VEX.0F 0xe0 - invalid */
6563
6564
6565/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
6566FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
6567{
6568 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6569 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
6570 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6571}
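
/*
 * Illustrative only: vpavgb above (and vpavgw, 0xe3) compute the rounded
 * unsigned average of each lane pair, i.e. (a + b + 1) >> 1 evaluated
 * without intermediate overflow.  Helper name is hypothetical.
 */
#if 0 /* reference sketch */
static uint8_t avgbSketch(uint8_t a, uint8_t b)
{
    return (uint8_t)(((unsigned)a + b + 1) >> 1);   /* widen first to avoid overflow */
}
#endif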
6572
6573
6574/* Opcode VEX.F3.0F 0xe0 - invalid */
6575/* Opcode VEX.F2.0F 0xe0 - invalid */
6576
6577/* Opcode VEX.0F 0xe1 - invalid */
6578/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
6579FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
6580{
6581 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6582 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
6583 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6584}
6585
6586/* Opcode VEX.F3.0F 0xe1 - invalid */
6587/* Opcode VEX.F2.0F 0xe1 - invalid */
6588
6589/* Opcode VEX.0F 0xe2 - invalid */
6590/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
6591FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
6592{
6593 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6594 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
6595 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6596}
6597
6598/* Opcode VEX.F3.0F 0xe2 - invalid */
6599/* Opcode VEX.F2.0F 0xe2 - invalid */
6600
6601/* Opcode VEX.0F 0xe3 - invalid */
6602
6603
6604/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
6605FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
6606{
6607 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6608 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
6609 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6610}
6611
6612
6613/* Opcode VEX.F3.0F 0xe3 - invalid */
6614/* Opcode VEX.F2.0F 0xe3 - invalid */
6615
6616/* Opcode VEX.0F 0xe4 - invalid */
6617
6618
6619/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
6620FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
6621{
6622 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6623 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
6624 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6625}
6626
6627
6628/* Opcode VEX.F3.0F 0xe4 - invalid */
6629/* Opcode VEX.F2.0F 0xe4 - invalid */
6630
6631/* Opcode VEX.0F 0xe5 - invalid */
6632
6633
6634/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
6635FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
6636{
6637 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6638 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
6639 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6640}
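
/*
 * Illustrative only: vpmulhuw/vpmulhw above keep the high 16 bits of the
 * 32-bit product of each word lane; unsigned vs. signed multiply is the
 * only difference between the two.  Helper names are hypothetical.
 */
#if 0 /* reference sketch */
static uint16_t mulhuwSketch(uint16_t a, uint16_t b)
{
    return (uint16_t)(((uint32_t)a * b) >> 16);     /* unsigned high half */
}
static uint16_t mulhwSketch(int16_t a, int16_t b)
{
    return (uint16_t)(((int32_t)a * b) >> 16);      /* signed high half */
}
#endif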
6641
6642
6643/* Opcode VEX.F3.0F 0xe5 - invalid */
6644/* Opcode VEX.F2.0F 0xe5 - invalid */
6645
6646/* Opcode VEX.0F 0xe6 - invalid */
6647/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
6648FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
6649/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
6650FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
6651/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
6652FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
6653
6654
6655/* Opcode VEX.0F 0xe7 - invalid */
6656
6657/**
6658 * @opcode 0xe7
6659 * @opcodesub !11 mr/reg
6660 * @oppfx 0x66
6661 * @opcpuid avx
6662 * @opgroup og_avx_cachect
6663 * @opxcpttype 1
6664 * @optest op1=-1 op2=2 -> op1=2
6665 * @optest op1=0 op2=-42 -> op1=-42
6666 */
6667FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
6668{
6669 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6670 Assert(pVCpu->iem.s.uVexLength <= 1);
6671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6672 if (IEM_IS_MODRM_MEM_MODE(bRm))
6673 {
6674 if (pVCpu->iem.s.uVexLength == 0)
6675 {
6676 /*
6677 * 128-bit: Memory, register.
6678 */
6679 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6680 IEM_MC_LOCAL(RTUINT128U, uSrc);
6681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6682
6683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6684 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6685 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6686 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6687
6688 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
6689 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6690
6691 IEM_MC_ADVANCE_RIP_AND_FINISH();
6692 IEM_MC_END();
6693 }
6694 else
6695 {
6696 /*
6697 * 256-bit: Memory, register.
6698 */
6699 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6700 IEM_MC_LOCAL(RTUINT256U, uSrc);
6701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6702
6703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6704 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6705 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6706 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6707
6708 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6709 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6710
6711 IEM_MC_ADVANCE_RIP_AND_FINISH();
6712 IEM_MC_END();
6713 }
6714 }
6715 /**
6716 * @opdone
6717 * @opmnemonic udvex660fe7reg
6718 * @opcode 0xe7
6719 * @opcodesub 11 mr/reg
6720 * @oppfx 0x66
6721 * @opunused immediate
6722 * @opcpuid avx
6723 * @optest ->
6724 */
6725 else
6726 IEMOP_RAISE_INVALID_OPCODE_RET();
6727}
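
/*
 * Illustrative only: the _ALIGN_ store helpers used above enforce the
 * natural operand alignment vmovntdq requires; conceptually the check is
 * the one sketched here, faulting (\#GP) on a misaligned address before
 * the non-temporal store is performed.  Helper name is hypothetical.
 */
#if 0 /* reference sketch */
static int movntdqAlignOkSketch(uintptr_t GCPtr, size_t cbOp /* 16 or 32 */)
{
    return (GCPtr & (cbOp - 1)) == 0;   /* must be 16/32 byte aligned, else fault */
}
#endif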
6728
6729/* Opcode VEX.F3.0F 0xe7 - invalid */
6730/* Opcode VEX.F2.0F 0xe7 - invalid */
6731
6732
6733/* Opcode VEX.0F 0xe8 - invalid */
6734
6735
6736/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
6737FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
6738{
6739 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6740 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
6741 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6742}
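
/*
 * Illustrative only: vpsubsb above and its signed siblings below
 * (vpsubsw, vpaddsb, vpaddsw) clamp each lane to the signed range instead
 * of wrapping.  Helper name is hypothetical.
 */
#if 0 /* reference sketch */
static int8_t subSsbSketch(int8_t i1, int8_t i2)
{
    int iRes = (int)i1 - i2;                        /* widen, then saturate */
    return iRes > 127 ? 127 : iRes < -128 ? -128 : (int8_t)iRes;
}
#endif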
6743
6744
6745/* Opcode VEX.F3.0F 0xe8 - invalid */
6746/* Opcode VEX.F2.0F 0xe8 - invalid */
6747
6748/* Opcode VEX.0F 0xe9 - invalid */
6749
6750
6751/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
6752FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
6753{
6754 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6755 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
6756 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6757}
6758
6759
6760/* Opcode VEX.F3.0F 0xe9 - invalid */
6761/* Opcode VEX.F2.0F 0xe9 - invalid */
6762
6763/* Opcode VEX.0F 0xea - invalid */
6764
6765
6766/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
6767FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
6768{
6769 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6770 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
6771 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6772}
6773
6774
6775/* Opcode VEX.F3.0F 0xea - invalid */
6776/* Opcode VEX.F2.0F 0xea - invalid */
6777
6778/* Opcode VEX.0F 0xeb - invalid */
6779
6780
6781/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
6782FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
6783{
6784 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6785 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6786 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
6787}
6788
6789
6790
6791/* Opcode VEX.F3.0F 0xeb - invalid */
6792/* Opcode VEX.F2.0F 0xeb - invalid */
6793
6794/* Opcode VEX.0F 0xec - invalid */
6795
6796
6797/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
6798FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
6799{
6800 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6801 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
6802 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6803}
6804
6805
6806/* Opcode VEX.F3.0F 0xec - invalid */
6807/* Opcode VEX.F2.0F 0xec - invalid */
6808
6809/* Opcode VEX.0F 0xed - invalid */
6810
6811
6812/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
6813FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
6814{
6815 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6816 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
6817 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6818}
6819
6820
6821/* Opcode VEX.F3.0F 0xed - invalid */
6822/* Opcode VEX.F2.0F 0xed - invalid */
6823
6824/* Opcode VEX.0F 0xee - invalid */
6825
6826
6827/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
6828FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
6829{
6830 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6831 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
6832 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6833}
6834
6835
6836/* Opcode VEX.F3.0F 0xee - invalid */
6837/* Opcode VEX.F2.0F 0xee - invalid */
6838
6839
6840/* Opcode VEX.0F 0xef - invalid */
6841
6842
6843/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
6844FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
6845{
6846 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6847 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6848 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
6849}
6850
6851
6852/* Opcode VEX.F3.0F 0xef - invalid */
6853/* Opcode VEX.F2.0F 0xef - invalid */
6854
6855/* Opcode VEX.0F 0xf0 - invalid */
6856/* Opcode VEX.66.0F 0xf0 - invalid */
6857
6858
6859/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
6860FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
6861{
6862 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6863 Assert(pVCpu->iem.s.uVexLength <= 1);
6864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6865 if (IEM_IS_MODRM_REG_MODE(bRm))
6866 {
6867 /*
6868 * Register, register - (not implemented, assuming it raises \#UD).
6869 */
6870 IEMOP_RAISE_INVALID_OPCODE_RET();
6871 }
6872 else if (pVCpu->iem.s.uVexLength == 0)
6873 {
6874 /*
6875 * Register, memory128.
6876 */
6877 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6878 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6880
6881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6882 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6883 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6884 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6885
6886 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6887 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6888
6889 IEM_MC_ADVANCE_RIP_AND_FINISH();
6890 IEM_MC_END();
6891 }
6892 else
6893 {
6894 /*
6895 * Register, memory256.
6896 */
6897 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6898 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
6899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6900
6901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6902 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6903 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6904 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6905
6906 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6907 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
6908
6909 IEM_MC_ADVANCE_RIP_AND_FINISH();
6910 IEM_MC_END();
6911 }
6912}
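
/*
 * Illustrative only: unlike vmovdqa, vlddqu never faults on misalignment -
 * the _NO_AC fetches above map to a plain unaligned load, conceptually the
 * byte copy sketched here.  Helper name is hypothetical.
 */
#if 0 /* reference sketch */
static void lddquSketch(uint8_t *pbDst, uint8_t const *pbSrc, size_t cbOp /* 16 or 32 */)
{
    for (size_t i = 0; i < cbOp; i++)   /* plain byte copy - any alignment is fine */
        pbDst[i] = pbSrc[i];
}
#endif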
6913
6914
6915/* Opcode VEX.0F 0xf1 - invalid */
6916/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
6917FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
6918{
6919 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6920 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
6921 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6922}
6923
6924/* Opcode VEX.F2.0F 0xf1 - invalid */
6925
6926/* Opcode VEX.0F 0xf2 - invalid */
6927/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
6928FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
6929{
6930 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6931 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
6932 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6933}
6934/* Opcode VEX.F2.0F 0xf2 - invalid */
6935
6936/* Opcode VEX.0F 0xf3 - invalid */
6937/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
6938FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
6939{
6940 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6941 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
6942 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6943}
6944/* Opcode VEX.F2.0F 0xf3 - invalid */
6945
6946/* Opcode VEX.0F 0xf4 - invalid */
6947
6948
6949/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
6950FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
6951{
6952 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6953 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
6954 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6955}
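
/*
 * Illustrative only: vpmuludq multiplies the even (low) dword of each qword
 * lane, producing a full 64-bit product per lane.  Helper name is
 * hypothetical.
 */
#if 0 /* reference sketch */
static uint64_t pmuludqSketch(uint64_t uSrc1, uint64_t uSrc2)
{
    return (uint64_t)(uint32_t)uSrc1 * (uint32_t)uSrc2; /* low dwords only */
}
#endif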
6956
6957
6958/* Opcode VEX.F2.0F 0xf4 - invalid */
6959
6960/* Opcode VEX.0F 0xf5 - invalid */
6961
6962
6963/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
6964FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
6965{
6966 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6967 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
6968 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6969}
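
/*
 * Illustrative only: vpmaddwd multiplies the signed words of each dword
 * lane pairwise and adds the two 32-bit products, as sketched per lane
 * below.  Helper name is hypothetical.
 */
#if 0 /* reference sketch */
static int32_t pmaddwdSketch(int16_t const aw1[2], int16_t const aw2[2])
{
    return (int32_t)aw1[0] * aw2[0] + (int32_t)aw1[1] * aw2[1];
}
#endif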
6970
6971
6972/* Opcode VEX.F2.0F 0xf5 - invalid */
6973
6974/* Opcode VEX.0F 0xf6 - invalid */
6975
6976
6977/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
6978FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
6979{
6980 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6981 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
6982 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6983}
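
/*
 * Illustrative only: vpsadbw sums the absolute byte differences of each
 * 8-byte group into the low word of the corresponding qword lane (upper
 * bits zero).  One group is sketched below; helper name is hypothetical.
 */
#if 0 /* reference sketch */
static uint64_t psadbwGroupSketch(uint8_t const *pab1, uint8_t const *pab2)
{
    unsigned uSum = 0;
    for (unsigned i = 0; i < 8; i++)
        uSum += pab1[i] >= pab2[i] ? pab1[i] - pab2[i] : pab2[i] - pab1[i];
    return uSum;                        /* fits in 16 bits; qword zero-extended */
}
#endif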
6984
6985
6986/* Opcode VEX.F2.0F 0xf6 - invalid */
6987
6988/* Opcode VEX.0F 0xf7 - invalid */
6989
6990
6991/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
6992FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
6993{
6994// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
6995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6996 if (IEM_IS_MODRM_REG_MODE(bRm))
6997 {
6998 /*
6999 * XMM, XMM, (implicit) [R|E]DI
7000 */
7001 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
7002 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
7003 IEM_MC_LOCAL( uint64_t, u64EffAddr);
7004 IEM_MC_LOCAL( RTUINT128U, u128Mem);
7005 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
7006 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
7007 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
7008 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
7009 IEM_MC_PREPARE_AVX_USAGE();
7010
7011 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
7012 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
7013 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
7014 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
7015 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
7016 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
7017
7018 IEM_MC_ADVANCE_RIP_AND_FINISH();
7019 IEM_MC_END();
7020 }
7021 else
7022 {
7023 /* The memory, register encoding is invalid. */
7024 IEMOP_RAISE_INVALID_OPCODE_RET();
7025 }
7026}
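
/*
 * Illustrative only: the maskmovdqu helper invoked above performs a byte-
 * granular merge at [R|E]DI - byte i of the source is stored only when the
 * MSB of mask byte i is set.  The read-modify-write below mirrors how the
 * emulation fetches, merges and stores the full 16 bytes.  Helper name is
 * hypothetical.
 */
#if 0 /* reference sketch */
static void maskmovdquSketch(uint8_t *pabMem, uint8_t const *pabSrc, uint8_t const *pabMsk)
{
    for (unsigned i = 0; i < 16; i++)
        if (pabMsk[i] & 0x80)           /* MSB of mask byte selects the store */
            pabMem[i] = pabSrc[i];
}
#endif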
7027
7028
7029/* Opcode VEX.F2.0F 0xf7 - invalid */
7030
7031/* Opcode VEX.0F 0xf8 - invalid */
7032
7033
7034/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
7035FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
7036{
7037 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7038 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
7039 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7040}
7041
7042
7043/* Opcode VEX.F2.0F 0xf8 - invalid */
7044
7045/* Opcode VEX.0F 0xf9 - invalid */
7046
7047
7048/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
7049FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
7050{
7051 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7052 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
7053 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7054}
7055
7056
7057/* Opcode VEX.F2.0F 0xf9 - invalid */
7058
7059/* Opcode VEX.0F 0xfa - invalid */
7060
7061
7062/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
7063FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
7064{
7065 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7066 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
7067 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7068}
7069
7070
7071/* Opcode VEX.F2.0F 0xfa - invalid */
7072
7073/* Opcode VEX.0F 0xfb - invalid */
7074
7075
7076/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
7077FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
7078{
7079 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7080 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
7081 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7082}
7083
7084
7085/* Opcode VEX.F2.0F 0xfb - invalid */
7086
7087/* Opcode VEX.0F 0xfc - invalid */
7088
7089
7090/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
7091FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
7092{
7093 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7094 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
7095 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7096}
7097
7098
7099/* Opcode VEX.F2.0F 0xfc - invalid */
7100
7101/* Opcode VEX.0F 0xfd - invalid */
7102
7103
7104/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
7105FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
7106{
7107 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7108 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
7109 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7110}
7111
7112
7113/* Opcode VEX.F2.0F 0xfd - invalid */
7114
7115/* Opcode VEX.0F 0xfe - invalid */
7116
7117
7118/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
7119FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
7120{
7121 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7122 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
7123 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7124}
7125
7126
7127/* Opcode VEX.F2.0F 0xfe - invalid */
7128
7129
7130/** Opcode **** 0x0f 0xff - UD0 */
7131FNIEMOP_DEF(iemOp_vud0)
7132{
7133/** @todo testcase: vud0 */
7134 IEMOP_MNEMONIC(vud0, "vud0");
7135 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7136 {
7137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7138 if (IEM_IS_MODRM_MEM_MODE(bRm))
7139 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
7140 }
7141 IEMOP_HLP_DONE_DECODING();
7142 IEMOP_RAISE_INVALID_OPCODE_RET();
7143}
7144
7145
7146
7147/**
7148 * VEX opcode map \#1.
7149 *
7150 * @sa g_apfnTwoByteMap
7151 */
7152const PFNIEMOP g_apfnVexMap1[] =
7153{
7154 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
7155 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7156 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7157 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7158 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7159 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7160 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7161 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7162 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7163 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7164 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7165 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7166 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
7167 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7168 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7169 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7170 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7171
7172 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
7173 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
7174 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7175 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7176 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7177 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7178 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7179 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7180 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7181 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7182 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7183 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7184 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7185 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7186 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7187 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7188
7189 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7190 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7191 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7192 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7193 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7194 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7195 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7196 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7197 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7198 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7199 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7200 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7201 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7202 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7203 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7204 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7205
7206 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7207 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7208 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7209 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7210 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7211 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7212 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7213 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7214 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7215 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7216 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7217 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7218 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7219 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7220 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7221 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7222
7223 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7224 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7225 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7226 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7227 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
7228 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
7229 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
7230 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
7231 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
7232 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
7233 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
7234 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
7235 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
7236 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
7237 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
7238 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
7239
7240 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7241 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7242 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7243 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7244 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7245 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7246 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7247 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7248 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7249 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7250 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7251 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7252 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7253 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7254 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7255 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7256
7257 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7258 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7259 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7260 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7261 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7262 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7263 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7264 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7265 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7266 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7267 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7268 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7269 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7270 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7271 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7272 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7273
7274 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7275 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7276 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7277 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7278 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7279 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7280 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7281 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7282 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
7283 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
7284 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
7285 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
7286 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7287 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7288 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7289 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7290
7291 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
7292 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
7293 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
7294 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
7295 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
7296 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
7297 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
7298 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
7299 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
7300 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
7301 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
7302 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
7303 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
7304 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
7305 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
7306 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
7307
7308 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
7309 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
7310 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
7311 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
7312 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
7313 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
7314 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
7315 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
7316 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
7317 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
7318 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
7319 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
7320 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
7321 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
7322 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
7323 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
7324
7325 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7326 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7327 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7328 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7329 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7330 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7331 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7332 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7333 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7334 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7335 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
7336 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
7337 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
7338 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
7339 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
7340 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
7341
7342 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7343 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7344 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7345 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7346 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7347 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7348 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7349 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7350 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7351 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7352 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
7353 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
7354 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
7355 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
7356 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
7357 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
7358
7359 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7360 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7361 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7362 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7363 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7364 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7365 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7366 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7367 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7368 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7369 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
7370 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
7371 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
7372 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
7373 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
7374 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
7375
7376 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7377 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7378 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7379 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7380 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7381 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7382 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7383 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7384 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7385 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7386 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7387 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7388 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7389 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7390 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7391 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7392
7393 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7394 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7395 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7396 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7397 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7398 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7399 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7400 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7401 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7402 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7403 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7404 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7405 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7406 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7407 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7408 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7409
7410 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7411 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7412 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7413 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7414 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7415 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7416 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7417 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7418 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7419 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7420 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7421 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7422 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7423 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7424 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7425 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
7426};
7427AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
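
/*
 * Illustrative only: the map stores four entries per opcode byte, one per
 * mandatory-prefix column (hence the 1024-entry assertion above), so a
 * dispatcher would conceptually index it as sketched below.  The exact
 * decoder-side expression is an assumption here.
 */
#if 0 /* reference sketch */
static PFNIEMOP vexMap1LookupSketch(uint8_t bOpcode, unsigned idxPrefix /* 0=none, 1=66h, 2=F3h, 3=F2h */)
{
    return g_apfnVexMap1[(size_t)bOpcode * 4 + idxPrefix];
}
#endif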
7428/** @} */
7429