VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@ 105414

Last change on this file since 105414 was 105357, checked in by vboxsync, 8 months ago

VMM/IEM: Implement vstmxcsr instruction emulation (test missing), bugref:9898 [build fix]

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 302.4 KB
Line 
1/* $Id: IEMAllInstVexMap1.cpp.h 105357 2024-07-16 13:08:32Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
54 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
55 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
56 IEM_MC_PREPARE_AVX_USAGE();
57
58 IEM_MC_LOCAL(X86YMMREG, uSrc1);
59 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
60 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
61 IEM_MC_LOCAL(X86YMMREG, uSrc2);
62 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
63 IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
64 IEM_MC_LOCAL(X86YMMREG, uDst);
65 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
66 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
67 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
68 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
69 IEM_MC_ADVANCE_RIP_AND_FINISH();
70 IEM_MC_END();
71 }
72 else
73 {
74 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
75 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
76 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
77 IEM_MC_PREPARE_AVX_USAGE();
78
79 IEM_MC_LOCAL(X86XMMREG, uDst);
80 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
81 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
82 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
83 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
84 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
85 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
86 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
87 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
88 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
89 IEM_MC_ADVANCE_RIP_AND_FINISH();
90 IEM_MC_END();
91 }
92 }
93 else
94 {
95 /*
96 * Register, memory.
97 */
98 if (pVCpu->iem.s.uVexLength)
99 {
100 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
103 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
105 IEM_MC_PREPARE_AVX_USAGE();
106
107 IEM_MC_LOCAL(X86YMMREG, uSrc2);
108 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
109 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
110 IEM_MC_LOCAL(X86YMMREG, uSrc1);
111 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
112 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
113 IEM_MC_LOCAL(X86YMMREG, uDst);
114 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
115 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
116 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
117 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
118 IEM_MC_ADVANCE_RIP_AND_FINISH();
119 IEM_MC_END();
120 }
121 else
122 {
123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
126 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
128 IEM_MC_PREPARE_AVX_USAGE();
129
130 IEM_MC_LOCAL(X86XMMREG, uDst);
131 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
132 IEM_MC_LOCAL(X86XMMREG, uSrc2);
133 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
134 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
135 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
136 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
137
138 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
139 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
140 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
141 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
142 IEM_MC_ADVANCE_RIP_AND_FINISH();
143 IEM_MC_END();
144 }
145 }
146}
147
148
149/**
150 * Common worker for scalar AVX/AVX2 instructions on the forms (addss,subss,etc.):
151 * - vxxxss xmm0, xmm1, xmm2/mem32
152 *
153 * Exceptions type 4. AVX cpuid check for 128-bit operation.
154 * Ignores VEX.L, from SDM:
155 * Software should ensure VADDSS is encoded with VEX.L=0.
156 * Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
157 * across different processor generations.
158 */
159FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
160{
161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
162 if (IEM_IS_MODRM_REG_MODE(bRm))
163 {
164 /*
165 * Register, register.
166 */
167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
168 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
169 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
170 IEM_MC_PREPARE_AVX_USAGE();
171
172 IEM_MC_LOCAL(X86XMMREG, uDst);
173 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
174 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
175 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
176 IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
177 IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
178 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
179 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
180 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
181 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
182 IEM_MC_ADVANCE_RIP_AND_FINISH();
183 IEM_MC_END();
184 }
185 else
186 {
187 /*
188 * Register, memory.
189 */
190 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
194 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
195 IEM_MC_PREPARE_AVX_USAGE();
196
197 IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
198 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
199 IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
200 IEM_MC_LOCAL(X86XMMREG, uDst);
201 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
202 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
203 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
204 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
205 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
206 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
207 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
208 IEM_MC_ADVANCE_RIP_AND_FINISH();
209 IEM_MC_END();
210 }
211}
212
213
214/**
215 * Common worker for scalar AVX/AVX2 instructions on the forms (addsd,subsd,etc.):
216 * - vxxxsd xmm0, xmm1, xmm2/mem64
217 *
218 * Exceptions type 4. AVX cpuid check for 128-bit operation.
219 * Ignores VEX.L, from SDM:
220 * Software should ensure VADDSD is encoded with VEX.L=0.
221 * Encoding VADDSD with VEX.L=1 may encounter unpredictable behavior
222 * across different processor generations.
223 */
224FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R64, PFNIEMAIMPLFPAVXF3U128R64, pfnU128)
225{
226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
227 if (IEM_IS_MODRM_REG_MODE(bRm))
228 {
229 /*
230 * Register, register.
231 */
232 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
233 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
234 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
235 IEM_MC_PREPARE_AVX_USAGE();
236
237 IEM_MC_LOCAL(X86XMMREG, uDst);
238 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
239 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
240 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
241 IEM_MC_ARG(PCRTFLOAT64U, pr64Src2, 2);
242 IEM_MC_REF_XREG_R64_CONST(pr64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
243 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
244 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
245 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
246 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
247 IEM_MC_ADVANCE_RIP_AND_FINISH();
248 IEM_MC_END();
249 }
250 else
251 {
252 /*
253 * Register, memory.
254 */
255 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
258 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
260 IEM_MC_PREPARE_AVX_USAGE();
261
262 IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
263 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
264 IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
265 IEM_MC_LOCAL(X86XMMREG, uDst);
266 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
267 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
268 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
269 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
270 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
271 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
272 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
273 IEM_MC_ADVANCE_RIP_AND_FINISH();
274 IEM_MC_END();
275 }
276}
277
278
279/**
280 * Common worker for AVX2 instructions on the forms:
281 * - vpxxx xmm0, xmm1, xmm2/mem128
282 * - vpxxx ymm0, ymm1, ymm2/mem256
283 *
284 * Takes function table for function w/o implicit state parameter.
285 *
286 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
287 */
288FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
289{
290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
291 if (IEM_IS_MODRM_REG_MODE(bRm))
292 {
293 /*
294 * Register, register.
295 */
296 if (pVCpu->iem.s.uVexLength)
297 {
298 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
299 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
300 IEM_MC_LOCAL(RTUINT256U, uDst);
301 IEM_MC_LOCAL(RTUINT256U, uSrc1);
302 IEM_MC_LOCAL(RTUINT256U, uSrc2);
303 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
304 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
305 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
307 IEM_MC_PREPARE_AVX_USAGE();
308 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
309 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
310 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
311 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
312 IEM_MC_ADVANCE_RIP_AND_FINISH();
313 IEM_MC_END();
314 }
315 else
316 {
317 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
318 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
319 IEM_MC_ARG(PRTUINT128U, puDst, 0);
320 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
321 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
323 IEM_MC_PREPARE_AVX_USAGE();
324 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
325 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
326 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
327 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
328 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
329 IEM_MC_ADVANCE_RIP_AND_FINISH();
330 IEM_MC_END();
331 }
332 }
333 else
334 {
335 /*
336 * Register, memory.
337 */
338 if (pVCpu->iem.s.uVexLength)
339 {
340 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
341 IEM_MC_LOCAL(RTUINT256U, uDst);
342 IEM_MC_LOCAL(RTUINT256U, uSrc1);
343 IEM_MC_LOCAL(RTUINT256U, uSrc2);
344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
345 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
346 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
347 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
348
349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
350 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
352 IEM_MC_PREPARE_AVX_USAGE();
353
354 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
355 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
356 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
357 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
358
359 IEM_MC_ADVANCE_RIP_AND_FINISH();
360 IEM_MC_END();
361 }
362 else
363 {
364 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
365 IEM_MC_LOCAL(RTUINT128U, uSrc2);
366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
367 IEM_MC_ARG(PRTUINT128U, puDst, 0);
368 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
369 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
370
371 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
372 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
373 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
374 IEM_MC_PREPARE_AVX_USAGE();
375
376 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
377 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
378 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
379 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
380 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
381
382 IEM_MC_ADVANCE_RIP_AND_FINISH();
383 IEM_MC_END();
384 }
385 }
386}
387
388
389/**
390 * Common worker for AVX2 instructions on the forms:
391 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
392 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
393 *
394 * The 128-bit memory version of this instruction may elect to skip fetching the
395 * lower 64 bits of the operand. We, however, do not.
396 *
397 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
398 */
399FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
400{
401 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
402}
403
404
405/**
406 * Common worker for AVX2 instructions on the forms:
407 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
408 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
409 *
410 * The 128-bit memory version of this instruction may elect to skip fetching the
411 * higher 64 bits of the operand. We, however, do not.
412 *
413 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
414 */
415FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
416{
417 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
418}
419
420
421/**
422 * Common worker for AVX2 instructions on the forms:
423 * - vpxxx xmm0, xmm1/mem128
424 * - vpxxx ymm0, ymm1/mem256
425 *
426 * Takes function table for function w/o implicit state parameter.
427 *
428 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
429 */
430FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
431{
432 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
433 if (IEM_IS_MODRM_REG_MODE(bRm))
434 {
435 /*
436 * Register, register.
437 */
438 if (pVCpu->iem.s.uVexLength)
439 {
440 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
441 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
442 IEM_MC_LOCAL(RTUINT256U, uDst);
443 IEM_MC_LOCAL(RTUINT256U, uSrc);
444 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
445 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
446 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
447 IEM_MC_PREPARE_AVX_USAGE();
448 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
449 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
450 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
451 IEM_MC_ADVANCE_RIP_AND_FINISH();
452 IEM_MC_END();
453 }
454 else
455 {
456 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
457 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
458 IEM_MC_ARG(PRTUINT128U, puDst, 0);
459 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
460 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
461 IEM_MC_PREPARE_AVX_USAGE();
462 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
463 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
464 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
465 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
466 IEM_MC_ADVANCE_RIP_AND_FINISH();
467 IEM_MC_END();
468 }
469 }
470 else
471 {
472 /*
473 * Register, memory.
474 */
475 if (pVCpu->iem.s.uVexLength)
476 {
477 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
478 IEM_MC_LOCAL(RTUINT256U, uDst);
479 IEM_MC_LOCAL(RTUINT256U, uSrc);
480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
481 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
482 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
483
484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
485 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
486 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
487 IEM_MC_PREPARE_AVX_USAGE();
488
489 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
490 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
491 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
492
493 IEM_MC_ADVANCE_RIP_AND_FINISH();
494 IEM_MC_END();
495 }
496 else
497 {
498 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
499 IEM_MC_LOCAL(RTUINT128U, uSrc);
500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
501 IEM_MC_ARG(PRTUINT128U, puDst, 0);
502 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
503
504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
505 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
506 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
507 IEM_MC_PREPARE_AVX_USAGE();
508
509 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
510 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
511 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
512 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
513
514 IEM_MC_ADVANCE_RIP_AND_FINISH();
515 IEM_MC_END();
516 }
517 }
518}
519
520
521/**
522 * Common worker for AVX/AVX2 instructions on the forms:
523 * - vpxxx xmm0, xmm1/mem128
524 * - vpxxx ymm0, ymm1/mem256
525 *
526 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
527 */
528FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx, PCIEMOPMEDIAF2, pImpl)
529{
530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
531 if (IEM_IS_MODRM_REG_MODE(bRm))
532 {
533 /*
534 * Register, register.
535 */
536 if (pVCpu->iem.s.uVexLength)
537 {
538 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
539 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
540 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
541 IEM_MC_PREPARE_AVX_USAGE();
542
543 IEM_MC_LOCAL(X86YMMREG, uSrc);
544 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
545 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
546 IEM_MC_LOCAL(X86YMMREG, uDst);
547 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
548 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
549 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
550 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
551 IEM_MC_ADVANCE_RIP_AND_FINISH();
552 IEM_MC_END();
553 }
554 else
555 {
556 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
557 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
558 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
559 IEM_MC_PREPARE_AVX_USAGE();
560
561 IEM_MC_LOCAL(X86XMMREG, uDst);
562 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
563 IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
564 IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
565 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
566 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
567 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
568 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
569 IEM_MC_ADVANCE_RIP_AND_FINISH();
570 IEM_MC_END();
571 }
572 }
573 else
574 {
575 /*
576 * Register, memory.
577 */
578 if (pVCpu->iem.s.uVexLength)
579 {
580 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
583 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
584 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
585 IEM_MC_PREPARE_AVX_USAGE();
586
587 IEM_MC_LOCAL(X86YMMREG, uSrc);
588 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
589 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
590 IEM_MC_LOCAL(X86YMMREG, uDst);
591 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
592 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
594 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
595 IEM_MC_ADVANCE_RIP_AND_FINISH();
596 IEM_MC_END();
597 }
598 else
599 {
600 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
602 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
603 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
604 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
605 IEM_MC_PREPARE_AVX_USAGE();
606
607 IEM_MC_LOCAL(X86XMMREG, uDst);
608 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
609 IEM_MC_LOCAL(X86XMMREG, uSrc);
610 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
611 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
612 IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
613 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
614 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
615 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
616 IEM_MC_ADVANCE_RIP_AND_FINISH();
617 IEM_MC_END();
618 }
619 }
620}
621
622
623
624/* Opcode VEX.0F 0x00 - invalid */
625/* Opcode VEX.0F 0x01 - invalid */
626/* Opcode VEX.0F 0x02 - invalid */
627/* Opcode VEX.0F 0x03 - invalid */
628/* Opcode VEX.0F 0x04 - invalid */
629/* Opcode VEX.0F 0x05 - invalid */
630/* Opcode VEX.0F 0x06 - invalid */
631/* Opcode VEX.0F 0x07 - invalid */
632/* Opcode VEX.0F 0x08 - invalid */
633/* Opcode VEX.0F 0x09 - invalid */
634/* Opcode VEX.0F 0x0a - invalid */
635
636/** Opcode VEX.0F 0x0b. */
637FNIEMOP_DEF(iemOp_vud2)
638{
639 IEMOP_MNEMONIC(vud2, "vud2");
640 IEMOP_RAISE_INVALID_OPCODE_RET();
641}
642
643/* Opcode VEX.0F 0x0c - invalid */
644/* Opcode VEX.0F 0x0d - invalid */
645/* Opcode VEX.0F 0x0e - invalid */
646/* Opcode VEX.0F 0x0f - invalid */
647
648
649/**
650 * @opcode 0x10
651 * @oppfx none
652 * @opcpuid avx
653 * @opgroup og_avx_simdfp_datamove
654 * @opxcpttype 4UA
655 * @optest op1=1 op2=2 -> op1=2
656 * @optest op1=0 op2=-22 -> op1=-22
657 */
658FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
659{
660 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
661 Assert(pVCpu->iem.s.uVexLength <= 1);
662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
663 if (IEM_IS_MODRM_REG_MODE(bRm))
664 {
665 /*
666 * Register, register.
667 */
668 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
669 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
670 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
671 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
672 if (pVCpu->iem.s.uVexLength == 0)
673 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
674 IEM_GET_MODRM_RM(pVCpu, bRm));
675 else
676 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
677 IEM_GET_MODRM_RM(pVCpu, bRm));
678 IEM_MC_ADVANCE_RIP_AND_FINISH();
679 IEM_MC_END();
680 }
681 else if (pVCpu->iem.s.uVexLength == 0)
682 {
683 /*
684 * 128-bit: Register, Memory
685 */
686 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
687 IEM_MC_LOCAL(RTUINT128U, uSrc);
688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
689
690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
691 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
692 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
693 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
694
695 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
696 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
697
698 IEM_MC_ADVANCE_RIP_AND_FINISH();
699 IEM_MC_END();
700 }
701 else
702 {
703 /*
704 * 256-bit: Register, Memory
705 */
706 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
707 IEM_MC_LOCAL(RTUINT256U, uSrc);
708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
709
710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
712 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
713 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
714
715 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
716 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
717
718 IEM_MC_ADVANCE_RIP_AND_FINISH();
719 IEM_MC_END();
720 }
721}
722
723
724/**
725 * @opcode 0x10
726 * @oppfx 0x66
727 * @opcpuid avx
728 * @opgroup og_avx_simdfp_datamove
729 * @opxcpttype 4UA
730 * @optest op1=1 op2=2 -> op1=2
731 * @optest op1=0 op2=-22 -> op1=-22
732 */
733FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
734{
735 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
736 Assert(pVCpu->iem.s.uVexLength <= 1);
737 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
738 if (IEM_IS_MODRM_REG_MODE(bRm))
739 {
740 /*
741 * Register, register.
742 */
743 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
744 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
745 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
746 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
747 if (pVCpu->iem.s.uVexLength == 0)
748 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
749 IEM_GET_MODRM_RM(pVCpu, bRm));
750 else
751 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
752 IEM_GET_MODRM_RM(pVCpu, bRm));
753 IEM_MC_ADVANCE_RIP_AND_FINISH();
754 IEM_MC_END();
755 }
756 else if (pVCpu->iem.s.uVexLength == 0)
757 {
758 /*
759 * 128-bit: Memory, register.
760 */
761 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
762 IEM_MC_LOCAL(RTUINT128U, uSrc);
763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
764
765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
766 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
767 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
768 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
769
770 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
771 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
772
773 IEM_MC_ADVANCE_RIP_AND_FINISH();
774 IEM_MC_END();
775 }
776 else
777 {
778 /*
779 * 256-bit: Memory, register.
780 */
781 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
782 IEM_MC_LOCAL(RTUINT256U, uSrc);
783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
784
785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
786 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
787 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
788 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
789
790 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
791 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
792
793 IEM_MC_ADVANCE_RIP_AND_FINISH();
794 IEM_MC_END();
795 }
796}
797
798
799FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
800{
801 Assert(pVCpu->iem.s.uVexLength <= 1);
802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
803 if (IEM_IS_MODRM_REG_MODE(bRm))
804 {
805 /**
806 * @opcode 0x10
807 * @oppfx 0xf3
808 * @opcodesub 11 mr/reg
809 * @opcpuid avx
810 * @opgroup og_avx_simdfp_datamerge
811 * @opxcpttype 5
812 * @optest op1=1 op2=0 op3=2 -> op1=2
813 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
814 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
815 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
816 * @note HssHi refers to bits 127:32.
817 */
818 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
819 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
820 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
822 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
823 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
824 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
825 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
826 IEM_MC_ADVANCE_RIP_AND_FINISH();
827 IEM_MC_END();
828 }
829 else
830 {
831 /**
832 * @opdone
833 * @opcode 0x10
834 * @oppfx 0xf3
835 * @opcodesub !11 mr/reg
836 * @opcpuid avx
837 * @opgroup og_avx_simdfp_datamove
838 * @opxcpttype 5
839 * @opfunction iemOp_vmovss_Vss_Hss_Wss
840 * @optest op1=1 op2=2 -> op1=2
841 * @optest op1=0 op2=-22 -> op1=-22
842 */
843 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
844 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
845 IEM_MC_LOCAL(uint32_t, uSrc);
846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
847
848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
849 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
850 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
851 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
852
853 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
854 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
855
856 IEM_MC_ADVANCE_RIP_AND_FINISH();
857 IEM_MC_END();
858 }
859}
860
861
862FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
863{
864 Assert(pVCpu->iem.s.uVexLength <= 1);
865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
866 if (IEM_IS_MODRM_REG_MODE(bRm))
867 {
868 /**
869 * @opcode 0x10
870 * @oppfx 0xf2
871 * @opcodesub 11 mr/reg
872 * @opcpuid avx
873 * @opgroup og_avx_simdfp_datamerge
874 * @opxcpttype 5
875 * @optest op1=1 op2=0 op3=2 -> op1=2
876 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
877 * @optest op1=3 op2=-1 op3=0x77 ->
878 * op1=0xffffffffffffffff0000000000000077
879 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
880 */
881 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
882 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
883 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
884
885 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
886 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
887 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
888 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
889 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
890 IEM_MC_ADVANCE_RIP_AND_FINISH();
891 IEM_MC_END();
892 }
893 else
894 {
895 /**
896 * @opdone
897 * @opcode 0x10
898 * @oppfx 0xf2
899 * @opcodesub !11 mr/reg
900 * @opcpuid avx
901 * @opgroup og_avx_simdfp_datamove
902 * @opxcpttype 5
903 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
904 * @optest op1=1 op2=2 -> op1=2
905 * @optest op1=0 op2=-22 -> op1=-22
906 */
907 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
908 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
909 IEM_MC_LOCAL(uint64_t, uSrc);
910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
911
912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
913 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
914 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
915 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
916
917 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
918 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
919
920 IEM_MC_ADVANCE_RIP_AND_FINISH();
921 IEM_MC_END();
922 }
923}
924
925
926/**
927 * @opcode 0x11
928 * @oppfx none
929 * @opcpuid avx
930 * @opgroup og_avx_simdfp_datamove
931 * @opxcpttype 4UA
932 * @optest op1=1 op2=2 -> op1=2
933 * @optest op1=0 op2=-22 -> op1=-22
934 */
935FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
936{
937 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
938 Assert(pVCpu->iem.s.uVexLength <= 1);
939 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
940 if (IEM_IS_MODRM_REG_MODE(bRm))
941 {
942 /*
943 * Register, register.
944 */
945 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
948 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
949 if (pVCpu->iem.s.uVexLength == 0)
950 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
951 IEM_GET_MODRM_REG(pVCpu, bRm));
952 else
953 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
954 IEM_GET_MODRM_REG(pVCpu, bRm));
955 IEM_MC_ADVANCE_RIP_AND_FINISH();
956 IEM_MC_END();
957 }
958 else if (pVCpu->iem.s.uVexLength == 0)
959 {
960 /*
961 * 128-bit: Memory, register.
962 */
963 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
964 IEM_MC_LOCAL(RTUINT128U, uSrc);
965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
966
967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
968 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
969 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
970 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
971
972 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
973 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
974
975 IEM_MC_ADVANCE_RIP_AND_FINISH();
976 IEM_MC_END();
977 }
978 else
979 {
980 /*
981 * 256-bit: Memory, register.
982 */
983 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
984 IEM_MC_LOCAL(RTUINT256U, uSrc);
985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
986
987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
988 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
989 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
990 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
991
992 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
993 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
994
995 IEM_MC_ADVANCE_RIP_AND_FINISH();
996 IEM_MC_END();
997 }
998}
999
1000
1001/**
1002 * @opcode 0x11
1003 * @oppfx 0x66
1004 * @opcpuid avx
1005 * @opgroup og_avx_simdfp_datamove
1006 * @opxcpttype 4UA
1007 * @optest op1=1 op2=2 -> op1=2
1008 * @optest op1=0 op2=-22 -> op1=-22
1009 */
1010FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
1011{
1012 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1013 Assert(pVCpu->iem.s.uVexLength <= 1);
1014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1015 if (IEM_IS_MODRM_REG_MODE(bRm))
1016 {
1017 /*
1018 * Register, register.
1019 */
1020 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1021 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1022 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1023 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1024 if (pVCpu->iem.s.uVexLength == 0)
1025 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1026 IEM_GET_MODRM_REG(pVCpu, bRm));
1027 else
1028 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1029 IEM_GET_MODRM_REG(pVCpu, bRm));
1030 IEM_MC_ADVANCE_RIP_AND_FINISH();
1031 IEM_MC_END();
1032 }
1033 else if (pVCpu->iem.s.uVexLength == 0)
1034 {
1035 /*
1036 * 128-bit: Memory, register.
1037 */
1038 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1039 IEM_MC_LOCAL(RTUINT128U, uSrc);
1040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1041
1042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1043 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1044 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1045 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1046
1047 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
1048 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1049
1050 IEM_MC_ADVANCE_RIP_AND_FINISH();
1051 IEM_MC_END();
1052 }
1053 else
1054 {
1055 /*
1056 * 256-bit: Memory, register.
1057 */
1058 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1059 IEM_MC_LOCAL(RTUINT256U, uSrc);
1060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1061
1062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1063 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1065 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1066
1067 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1068 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1069
1070 IEM_MC_ADVANCE_RIP_AND_FINISH();
1071 IEM_MC_END();
1072 }
1073}
1074
1075
1076FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
1077{
1078 Assert(pVCpu->iem.s.uVexLength <= 1);
1079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1080 if (IEM_IS_MODRM_REG_MODE(bRm))
1081 {
1082 /**
1083 * @opcode 0x11
1084 * @oppfx 0xf3
1085 * @opcodesub 11 mr/reg
1086 * @opcpuid avx
1087 * @opgroup og_avx_simdfp_datamerge
1088 * @opxcpttype 5
1089 * @optest op1=1 op2=0 op3=2 -> op1=2
1090 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
1091 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
1092 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
1093 */
1094 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1095 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1096 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1097
1098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1100 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
1101 IEM_GET_MODRM_REG(pVCpu, bRm),
1102 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
1103 IEM_MC_ADVANCE_RIP_AND_FINISH();
1104 IEM_MC_END();
1105 }
1106 else
1107 {
1108 /**
1109 * @opdone
1110 * @opcode 0x11
1111 * @oppfx 0xf3
1112 * @opcodesub !11 mr/reg
1113 * @opcpuid avx
1114 * @opgroup og_avx_simdfp_datamove
1115 * @opxcpttype 5
1116 * @opfunction iemOp_vmovss_Vss_Hss_Wss
1117 * @optest op1=1 op2=2 -> op1=2
1118 * @optest op1=0 op2=-22 -> op1=-22
1119 */
1120 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1121 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1122 IEM_MC_LOCAL(uint32_t, uSrc);
1123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1124
1125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1126 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1128 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1129
1130 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1131 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1132
1133 IEM_MC_ADVANCE_RIP_AND_FINISH();
1134 IEM_MC_END();
1135 }
1136}
1137
1138
1139FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
1140{
1141 Assert(pVCpu->iem.s.uVexLength <= 1);
1142 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1143 if (IEM_IS_MODRM_REG_MODE(bRm))
1144 {
1145 /**
1146 * @opcode 0x11
1147 * @oppfx 0xf2
1148 * @opcodesub 11 mr/reg
1149 * @opcpuid avx
1150 * @opgroup og_avx_simdfp_datamerge
1151 * @opxcpttype 5
1152 * @optest op1=1 op2=0 op3=2 -> op1=2
1153 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
1154 * @optest op1=3 op2=-1 op3=0x77 ->
1155 * op1=0xffffffffffffffff0000000000000077
1156 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
1157 */
1158 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1159 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1160 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1161
1162 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1163 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1164 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1165 IEM_GET_MODRM_REG(pVCpu, bRm),
1166 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
1167 IEM_MC_ADVANCE_RIP_AND_FINISH();
1168 IEM_MC_END();
1169 }
1170 else
1171 {
1172 /**
1173 * @opdone
1174 * @opcode 0x11
1175 * @oppfx 0xf2
1176 * @opcodesub !11 mr/reg
1177 * @opcpuid avx
1178 * @opgroup og_avx_simdfp_datamove
1179 * @opxcpttype 5
1180 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
1181 * @optest op1=1 op2=2 -> op1=2
1182 * @optest op1=0 op2=-22 -> op1=-22
1183 */
1184 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1185 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1186 IEM_MC_LOCAL(uint64_t, uSrc);
1187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1188
1189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1193
1194 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1195 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1196
1197 IEM_MC_ADVANCE_RIP_AND_FINISH();
1198 IEM_MC_END();
1199 }
1200}
1201
1202
1203FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
1204{
1205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1206 if (IEM_IS_MODRM_REG_MODE(bRm))
1207 {
1208 /**
1209 * @opcode 0x12
1210 * @opcodesub 11 mr/reg
1211 * @oppfx none
1212 * @opcpuid avx
1213 * @opgroup og_avx_simdfp_datamerge
1214 * @opxcpttype 7LZ
1215 * @optest op2=0x2200220122022203
1216 * op3=0x3304330533063307
1217 * -> op1=0x22002201220222033304330533063307
1218 * @optest op2=-1 op3=-42 -> op1=-42
1219 * @note op3 and op2 are only the 8-byte high XMM register halfs.
1220 */
1221 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1222 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1223 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1224
1225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1226 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1227 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1228 IEM_GET_MODRM_RM(pVCpu, bRm),
1229 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1230
1231 IEM_MC_ADVANCE_RIP_AND_FINISH();
1232 IEM_MC_END();
1233 }
1234 else
1235 {
1236 /**
1237 * @opdone
1238 * @opcode 0x12
1239 * @opcodesub !11 mr/reg
1240 * @oppfx none
1241 * @opcpuid avx
1242 * @opgroup og_avx_simdfp_datamove
1243 * @opxcpttype 5LZ
1244 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1245 * @optest op1=1 op2=0 op3=0 -> op1=0
1246 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1247 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1248 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1249 */
1250 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1251
1252 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1253 IEM_MC_LOCAL(uint64_t, uSrc);
1254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1255
1256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1257 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1258 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1259 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1260
1261 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1262 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1263 uSrc,
1264 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1265
1266 IEM_MC_ADVANCE_RIP_AND_FINISH();
1267 IEM_MC_END();
1268 }
1269}
1270
1271
1272/**
1273 * @opcode 0x12
1274 * @opcodesub !11 mr/reg
1275 * @oppfx 0x66
1276 * @opcpuid avx
1277 * @opgroup og_avx_pcksclr_datamerge
1278 * @opxcpttype 5LZ
1279 * @optest op2=0 op3=2 -> op1=2
1280 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1281 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1282 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1283 */
1284FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1285{
1286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1287 if (IEM_IS_MODRM_MEM_MODE(bRm))
1288 {
1289 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1290
1291 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1292 IEM_MC_LOCAL(uint64_t, uSrc);
1293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1294
1295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1296 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1297 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1298 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1299
1300 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1301 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1302 uSrc,
1303 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1304
1305 IEM_MC_ADVANCE_RIP_AND_FINISH();
1306 IEM_MC_END();
1307 }
1308
1309 /**
1310 * @opdone
1311 * @opmnemonic udvex660f12m3
1312 * @opcode 0x12
1313 * @opcodesub 11 mr/reg
1314 * @oppfx 0x66
1315 * @opunused immediate
1316 * @opcpuid avx
1317 * @optest ->
1318 */
1319 else
1320 IEMOP_RAISE_INVALID_OPCODE_RET();
1321}
1322
1323
1324/**
1325 * @opcode 0x12
1326 * @oppfx 0xf3
1327 * @opcpuid avx
1328 * @opgroup og_avx_pcksclr_datamove
1329 * @opxcpttype 4
1330 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1331 * -> op1=0x00000002000000020000000100000001
1332 * @optest vex.l==1 /
1333 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1334 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1335 */
1336FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1337{
1338 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1339 Assert(pVCpu->iem.s.uVexLength <= 1);
1340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1341 if (IEM_IS_MODRM_REG_MODE(bRm))
1342 {
1343 /*
1344 * Register, register.
1345 */
1346 if (pVCpu->iem.s.uVexLength == 0)
1347 {
1348 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1349 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1350 IEM_MC_LOCAL(RTUINT128U, uSrc);
1351
1352 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1353 IEM_MC_PREPARE_AVX_USAGE();
1354
1355 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1356 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1357 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1358 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1359 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1360 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1361
1362 IEM_MC_ADVANCE_RIP_AND_FINISH();
1363 IEM_MC_END();
1364 }
1365 else
1366 {
1367 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1368 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1369 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1370 IEM_MC_PREPARE_AVX_USAGE();
1371
1372 IEM_MC_LOCAL(RTUINT256U, uSrc);
1373 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1374 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1375 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1376 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1377 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1378 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1379 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1380 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1381 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1382 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1383
1384 IEM_MC_ADVANCE_RIP_AND_FINISH();
1385 IEM_MC_END();
1386 }
1387 }
1388 else
1389 {
1390 /*
1391 * Register, memory.
1392 */
1393 if (pVCpu->iem.s.uVexLength == 0)
1394 {
1395 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1396 IEM_MC_LOCAL(RTUINT128U, uSrc);
1397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1398
1399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1400 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1401 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1402 IEM_MC_PREPARE_AVX_USAGE();
1403
1404 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1405 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1406 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1407 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1408 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1409 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1410
1411 IEM_MC_ADVANCE_RIP_AND_FINISH();
1412 IEM_MC_END();
1413 }
1414 else
1415 {
1416 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1419 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1420 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1421 IEM_MC_PREPARE_AVX_USAGE();
1422
1423 IEM_MC_LOCAL(RTUINT256U, uSrc);
1424 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1425
1426 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1427 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1428 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1429 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1430 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1431 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1432 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1433 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1434 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1435
1436 IEM_MC_ADVANCE_RIP_AND_FINISH();
1437 IEM_MC_END();
1438 }
1439 }
1440}
1441
1442
1443/**
1444 * @opcode 0x12
1445 * @oppfx 0xf2
1446 * @opcpuid avx
1447 * @opgroup og_avx_pcksclr_datamove
1448 * @opxcpttype 5
1449 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1450 * -> op1=0x22222222111111112222222211111111
1451 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1452 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1453 */
1454FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1455{
1456 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1457 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1458 if (IEM_IS_MODRM_REG_MODE(bRm))
1459 {
1460 /*
1461 * Register, register.
1462 */
1463 if (pVCpu->iem.s.uVexLength == 0)
1464 {
1465 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1466 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1467 IEM_MC_LOCAL(uint64_t, uSrc);
1468
1469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1470 IEM_MC_PREPARE_AVX_USAGE();
1471
1472 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1473 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1474 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1475 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1476
1477 IEM_MC_ADVANCE_RIP_AND_FINISH();
1478 IEM_MC_END();
1479 }
1480 else
1481 {
1482 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1483 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1484 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1485 IEM_MC_PREPARE_AVX_USAGE();
1486
1487 IEM_MC_LOCAL(uint64_t, uSrc1);
1488 IEM_MC_LOCAL(uint64_t, uSrc2);
1489 IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1490 IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);
1491
1492 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
1493 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
1494 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
1495 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
1496 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1497
1498 IEM_MC_ADVANCE_RIP_AND_FINISH();
1499 IEM_MC_END();
1500 }
1501 }
1502 else
1503 {
1504 /*
1505 * Register, memory.
1506 */
1507 if (pVCpu->iem.s.uVexLength == 0)
1508 {
1509 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1511 IEM_MC_LOCAL(uint64_t, uSrc);
1512
1513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1514 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1515 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1516 IEM_MC_PREPARE_AVX_USAGE();
1517
1518 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1519 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1520 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1521 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1522
1523 IEM_MC_ADVANCE_RIP_AND_FINISH();
1524 IEM_MC_END();
1525 }
1526 else
1527 {
1528 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1530
1531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1532 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1533 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1534 IEM_MC_PREPARE_AVX_USAGE();
1535
1536 IEM_MC_LOCAL(RTUINT256U, uSrc);
1537 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1538
1539 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1540 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1541 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1542 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1543 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1544
1545 IEM_MC_ADVANCE_RIP_AND_FINISH();
1546 IEM_MC_END();
1547 }
1548 }
1549}
1550
1551
1552/**
1553 * @opcode 0x13
1554 * @opcodesub !11 mr/reg
1555 * @oppfx none
1556 * @opcpuid avx
1557 * @opgroup og_avx_simdfp_datamove
1558 * @opxcpttype 5
1559 * @optest op1=1 op2=2 -> op1=2
1560 * @optest op1=0 op2=-42 -> op1=-42
1561 */
1562FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1563{
1564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1565 if (IEM_IS_MODRM_MEM_MODE(bRm))
1566 {
1567 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1568
1569 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1570 IEM_MC_LOCAL(uint64_t, uSrc);
1571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1572
1573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1574 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1575 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1576 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1577
1578 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1579 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1580
1581 IEM_MC_ADVANCE_RIP_AND_FINISH();
1582 IEM_MC_END();
1583 }
1584
1585 /**
1586 * @opdone
1587 * @opmnemonic udvex0f13m3
1588 * @opcode 0x13
1589 * @opcodesub 11 mr/reg
1590 * @oppfx none
1591 * @opunused immediate
1592 * @opcpuid avx
1593 * @optest ->
1594 */
1595 else
1596 IEMOP_RAISE_INVALID_OPCODE_RET();
1597}
1598
1599
1600/**
1601 * @opcode 0x13
1602 * @opcodesub !11 mr/reg
1603 * @oppfx 0x66
1604 * @opcpuid avx
1605 * @opgroup og_avx_pcksclr_datamove
1606 * @opxcpttype 5
1607 * @optest op1=1 op2=2 -> op1=2
1608 * @optest op1=0 op2=-42 -> op1=-42
1609 */
1610FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1611{
1612 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1613 if (IEM_IS_MODRM_MEM_MODE(bRm))
1614 {
1615 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1617 IEM_MC_LOCAL(uint64_t, uSrc);
1618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1619
1620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1621 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1624
1625 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1626 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1627
1628 IEM_MC_ADVANCE_RIP_AND_FINISH();
1629 IEM_MC_END();
1630 }
1631
1632 /**
1633 * @opdone
1634 * @opmnemonic udvex660f13m3
1635 * @opcode 0x13
1636 * @opcodesub 11 mr/reg
1637 * @oppfx 0x66
1638 * @opunused immediate
1639 * @opcpuid avx
1640 * @optest ->
1641 */
1642 else
1643 IEMOP_RAISE_INVALID_OPCODE_RET();
1644}
1645
1646/* Opcode VEX.F3.0F 0x13 - invalid */
1647/* Opcode VEX.F2.0F 0x13 - invalid */
1648
1649/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1650FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1651{
1652 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1653 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1654 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1655}
1656
1657
1658/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1659FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1660{
1661 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1662 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1663 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1664}
1665
1666
1667/* Opcode VEX.F3.0F 0x14 - invalid */
1668/* Opcode VEX.F2.0F 0x14 - invalid */
1669
1670
1671/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1672FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1673{
1674 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1675 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1676 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1677}
1678
1679
1680/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1681FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1682{
1683 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1684 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1685 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1686}
1687
1688
1689/* Opcode VEX.F3.0F 0x15 - invalid */
1690/* Opcode VEX.F2.0F 0x15 - invalid */
1691
1692
1693FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1694{
1695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1696 if (IEM_IS_MODRM_REG_MODE(bRm))
1697 {
1698 /**
1699 * @opcode 0x16
1700 * @opcodesub 11 mr/reg
1701 * @oppfx none
1702 * @opcpuid avx
1703 * @opgroup og_avx_simdfp_datamerge
1704 * @opxcpttype 7LZ
1705 */
1706 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1707
1708 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1709 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1710
1711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1712 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
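        /* vmovlhps: dst[63:0] = Hq[63:0] (VEX.vvvv), dst[127:64] = Uq[63:0] (mod r/m); bits above 127 are zeroed up to VLMAX. */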
1713 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1714 IEM_GET_MODRM_RM(pVCpu, bRm),
1715 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1716
1717 IEM_MC_ADVANCE_RIP_AND_FINISH();
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 /**
1723 * @opdone
1724 * @opcode 0x16
1725 * @opcodesub !11 mr/reg
1726 * @oppfx none
1727 * @opcpuid avx
1728 * @opgroup og_avx_simdfp_datamove
1729 * @opxcpttype 5LZ
1730 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1731 */
1732 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1733
1734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1735 IEM_MC_LOCAL(uint64_t, uSrc);
1736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1737
1738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1739 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1741 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1742
1743 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
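        /* vmovhps: dst[63:0] = Hq[63:0], dst[127:64] = the quadword read from memory; bits above 127 are zeroed up to VLMAX. */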
1744 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1745 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1746 uSrc);
1747
1748 IEM_MC_ADVANCE_RIP_AND_FINISH();
1749 IEM_MC_END();
1750 }
1751}
1752
1753
1754/**
1755 * @opcode 0x16
1756 * @opcodesub !11 mr/reg
1757 * @oppfx 0x66
1758 * @opcpuid avx
1759 * @opgroup og_avx_pcksclr_datamerge
1760 * @opxcpttype 5LZ
1761 */
1762FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1763{
1764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1765 if (IEM_IS_MODRM_MEM_MODE(bRm))
1766 {
1767 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1768
1769 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1770 IEM_MC_LOCAL(uint64_t, uSrc);
1771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1772
1773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1774 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1775 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1776 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1777
1778 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1779 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1780 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1781 uSrc);
1782
1783 IEM_MC_ADVANCE_RIP_AND_FINISH();
1784 IEM_MC_END();
1785 }
1786
1787 /**
1788 * @opdone
1789 * @opmnemonic udvex660f16m3
1790 * @opcode 0x16
1791 * @opcodesub 11 mr/reg
1792 * @oppfx 0x66
1793 * @opunused immediate
1794 * @opcpuid avx
1795 * @optest ->
1796 */
1797 else
1798 IEMOP_RAISE_INVALID_OPCODE_RET();
1799}
1800
1801
1802/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1803/**
1804 * @opcode 0x16
1805 * @oppfx 0xf3
1806 * @opcpuid avx
1807 * @opgroup og_avx_pcksclr_datamove
1808 * @opxcpttype 4
1809 */
1810FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1811{
1812 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1813 Assert(pVCpu->iem.s.uVexLength <= 1);
1814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1815 if (IEM_IS_MODRM_REG_MODE(bRm))
1816 {
1817 /*
1818 * Register, register.
1819 */
1820 if (pVCpu->iem.s.uVexLength == 0)
1821 {
1822 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1823 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1824 IEM_MC_LOCAL(RTUINT128U, uSrc);
1825
1826 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1827 IEM_MC_PREPARE_AVX_USAGE();
1828
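            /* vmovshdup duplicates the odd dwords: dst[0]=dst[1]=src[1], dst[2]=dst[3]=src[3]. */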
1829 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1830 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1831 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1832 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1833 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1834 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1835
1836 IEM_MC_ADVANCE_RIP_AND_FINISH();
1837 IEM_MC_END();
1838 }
1839 else
1840 {
1841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1842 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1844 IEM_MC_PREPARE_AVX_USAGE();
1845
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1848 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1849 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1850 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1851 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1852 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1853 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1854 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1855 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1856 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1857
1858 IEM_MC_ADVANCE_RIP_AND_FINISH();
1859 IEM_MC_END();
1860 }
1861 }
1862 else
1863 {
1864 /*
1865 * Register, memory.
1866 */
1867 if (pVCpu->iem.s.uVexLength == 0)
1868 {
1869 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1870 IEM_MC_LOCAL(RTUINT128U, uSrc);
1871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1872
1873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1874 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1875 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1876 IEM_MC_PREPARE_AVX_USAGE();
1877
1878 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1879 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1880 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1881 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1882 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1883 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1884
1885 IEM_MC_ADVANCE_RIP_AND_FINISH();
1886 IEM_MC_END();
1887 }
1888 else
1889 {
1890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1893 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1895 IEM_MC_PREPARE_AVX_USAGE();
1896
1897 IEM_MC_LOCAL(RTUINT256U, uSrc);
1898 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1899
1900 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1901 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1902 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1903 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1904 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1905 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1906 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1907 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1908 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1909
1910 IEM_MC_ADVANCE_RIP_AND_FINISH();
1911 IEM_MC_END();
1912 }
1913 }
1914}
1915
1916
1917/* Opcode VEX.F2.0F 0x16 - invalid */
1918
1919
1920/**
1921 * @opcode 0x17
1922 * @opcodesub !11 mr/reg
1923 * @oppfx none
1924 * @opcpuid avx
1925 * @opgroup og_avx_simdfp_datamove
1926 * @opxcpttype 5
1927 */
1928FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1929{
1930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1931 if (IEM_IS_MODRM_MEM_MODE(bRm))
1932 {
1933 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1934
1935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1936 IEM_MC_LOCAL(uint64_t, uSrc);
1937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1938
1939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1940 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1941 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1942 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1943
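        /* vmovhps to memory: store the high quadword (bits 127:64) of the source XMM register. */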
1944 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1945 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1946
1947 IEM_MC_ADVANCE_RIP_AND_FINISH();
1948 IEM_MC_END();
1949 }
1950
1951 /**
1952 * @opdone
1953 * @opmnemonic udvex0f17m3
1954 * @opcode 0x17
1955 * @opcodesub 11 mr/reg
1956 * @oppfx none
1957 * @opunused immediate
1958 * @opcpuid avx
1959 * @optest ->
1960 */
1961 else
1962 IEMOP_RAISE_INVALID_OPCODE_RET();
1963}
1964
1965
1966/**
1967 * @opcode 0x17
1968 * @opcodesub !11 mr/reg
1969 * @oppfx 0x66
1970 * @opcpuid avx
1971 * @opgroup og_avx_pcksclr_datamove
1972 * @opxcpttype 5
1973 */
1974FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1975{
1976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1977 if (IEM_IS_MODRM_MEM_MODE(bRm))
1978 {
1979 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1980
1981 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1982 IEM_MC_LOCAL(uint64_t, uSrc);
1983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1984
1985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1986 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1988 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1989
1990 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1991 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1992
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 }
1996
1997 /**
1998 * @opdone
1999 * @opmnemonic udvex660f17m3
2000 * @opcode 0x17
2001 * @opcodesub 11 mr/reg
2002 * @oppfx 0x66
2003 * @opunused immediate
2004 * @opcpuid avx
2005 * @optest ->
2006 */
2007 else
2008 IEMOP_RAISE_INVALID_OPCODE_RET();
2009}
2010
2011
2012/* Opcode VEX.F3.0F 0x17 - invalid */
2013/* Opcode VEX.F2.0F 0x17 - invalid */
2014
2015
2016/* Opcode VEX.0F 0x18 - invalid */
2017/* Opcode VEX.0F 0x19 - invalid */
2018/* Opcode VEX.0F 0x1a - invalid */
2019/* Opcode VEX.0F 0x1b - invalid */
2020/* Opcode VEX.0F 0x1c - invalid */
2021/* Opcode VEX.0F 0x1d - invalid */
2022/* Opcode VEX.0F 0x1e - invalid */
2023/* Opcode VEX.0F 0x1f - invalid */
2024
2025/* Opcode VEX.0F 0x20 - invalid */
2026/* Opcode VEX.0F 0x21 - invalid */
2027/* Opcode VEX.0F 0x22 - invalid */
2028/* Opcode VEX.0F 0x23 - invalid */
2029/* Opcode VEX.0F 0x24 - invalid */
2030/* Opcode VEX.0F 0x25 - invalid */
2031/* Opcode VEX.0F 0x26 - invalid */
2032/* Opcode VEX.0F 0x27 - invalid */
2033
2034/**
2035 * @opcode 0x28
2036 * @oppfx none
2037 * @opcpuid avx
2038 * @opgroup og_avx_pcksclr_datamove
2039 * @opxcpttype 1
2040 * @optest op1=1 op2=2 -> op1=2
2041 * @optest op1=0 op2=-42 -> op1=-42
2042 * @note Almost identical to vmovapd.
2043 */
2044FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2045{
2046 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2048 Assert(pVCpu->iem.s.uVexLength <= 1);
2049 if (IEM_IS_MODRM_REG_MODE(bRm))
2050 {
2051 /*
2052 * Register, register.
2053 */
2054 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2055 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2056
2057 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2058 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2059 if (pVCpu->iem.s.uVexLength == 0)
2060 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2061 IEM_GET_MODRM_RM(pVCpu, bRm));
2062 else
2063 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2064 IEM_GET_MODRM_RM(pVCpu, bRm));
2065 IEM_MC_ADVANCE_RIP_AND_FINISH();
2066 IEM_MC_END();
2067 }
2068 else
2069 {
2070 /*
2071 * Register, memory.
2072 */
2073 if (pVCpu->iem.s.uVexLength == 0)
2074 {
2075 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_LOCAL(RTUINT128U, uSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2083
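            /* The _ALIGN_SSE / _ALIGN_AVX fetchers enforce the natural alignment vmovaps requires, raising #GP(0) on a misaligned operand. */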
2084 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2085 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090 else
2091 {
2092 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2093 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2094 IEM_MC_LOCAL(RTUINT256U, uSrc);
2095
2096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2097 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2098 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2099 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2100
2101 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2102 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2103
2104 IEM_MC_ADVANCE_RIP_AND_FINISH();
2105 IEM_MC_END();
2106 }
2107 }
2108}
2109
2110
2111/**
2112 * @opcode 0x28
2113 * @oppfx 0x66
2114 * @opcpuid avx
2115 * @opgroup og_avx_pcksclr_datamove
2116 * @opxcpttype 1
2117 * @optest op1=1 op2=2 -> op1=2
2118 * @optest op1=0 op2=-42 -> op1=-42
2119 * @note Almost identical to vmovaps.
2120 */
2121FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2122{
2123 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2125 Assert(pVCpu->iem.s.uVexLength <= 1);
2126 if (IEM_IS_MODRM_REG_MODE(bRm))
2127 {
2128 /*
2129 * Register, register.
2130 */
2131 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2132 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2133
2134 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2135 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2136 if (pVCpu->iem.s.uVexLength == 0)
2137 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2138 IEM_GET_MODRM_RM(pVCpu, bRm));
2139 else
2140 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2141 IEM_GET_MODRM_RM(pVCpu, bRm));
2142 IEM_MC_ADVANCE_RIP_AND_FINISH();
2143 IEM_MC_END();
2144 }
2145 else
2146 {
2147 /*
2148 * Register, memory.
2149 */
2150 if (pVCpu->iem.s.uVexLength == 0)
2151 {
2152 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2154 IEM_MC_LOCAL(RTUINT128U, uSrc);
2155
2156 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2157 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2158 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2159 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2160
2161 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2162 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2163
2164 IEM_MC_ADVANCE_RIP_AND_FINISH();
2165 IEM_MC_END();
2166 }
2167 else
2168 {
2169 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171 IEM_MC_LOCAL(RTUINT256U, uSrc);
2172
2173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2174 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2175 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2176 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2177
2178 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2179 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184 }
2185}
2186
2187/**
2188 * @opmnemonic udvexf30f28
2189 * @opcode 0x28
2190 * @oppfx 0xf3
2191 * @opunused vex.modrm
2192 * @opcpuid avx
2193 * @optest ->
2194 * @opdone
2195 */
2196
2197/**
2198 * @opmnemonic udvexf20f28
2199 * @opcode 0x28
2200 * @oppfx 0xf2
2201 * @opunused vex.modrm
2202 * @opcpuid avx
2203 * @optest ->
2204 * @opdone
2205 */
2206
2207/**
2208 * @opcode 0x29
2209 * @oppfx none
2210 * @opcpuid avx
2211 * @opgroup og_avx_pcksclr_datamove
2212 * @opxcpttype 1
2213 * @optest op1=1 op2=2 -> op1=2
2214 * @optest op1=0 op2=-42 -> op1=-42
2215 * @note Almost identical to vmovapd.
2216 */
2217FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2218{
2219 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 if (IEM_IS_MODRM_REG_MODE(bRm))
2223 {
2224 /*
2225 * Register, register.
2226 */
2227 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2228 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2229
2230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2232 if (pVCpu->iem.s.uVexLength == 0)
2233 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2234 IEM_GET_MODRM_REG(pVCpu, bRm));
2235 else
2236 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2237 IEM_GET_MODRM_REG(pVCpu, bRm));
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 }
2241 else
2242 {
2243 /*
2244 * Register, memory.
2245 */
2246 if (pVCpu->iem.s.uVexLength == 0)
2247 {
2248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250 IEM_MC_LOCAL(RTUINT128U, uSrc);
2251
2252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2253 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2256
2257 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2258 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2259
2260 IEM_MC_ADVANCE_RIP_AND_FINISH();
2261 IEM_MC_END();
2262 }
2263 else
2264 {
2265 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2266 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2267 IEM_MC_LOCAL(RTUINT256U, uSrc);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2270 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2271 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2272 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2273
2274 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2275 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2276
2277 IEM_MC_ADVANCE_RIP_AND_FINISH();
2278 IEM_MC_END();
2279 }
2280 }
2281}
2282
2283/**
2284 * @opcode 0x29
2285 * @oppfx 0x66
2286 * @opcpuid avx
2287 * @opgroup og_avx_pcksclr_datamove
2288 * @opxcpttype 1
2289 * @optest op1=1 op2=2 -> op1=2
2290 * @optest op1=0 op2=-42 -> op1=-42
2291 * @note Almost identical to vmovaps.
2292 */
2293FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2294{
2295 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2296 Assert(pVCpu->iem.s.uVexLength <= 1);
2297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2298 if (IEM_IS_MODRM_REG_MODE(bRm))
2299 {
2300 /*
2301 * Register, register.
2302 */
2303 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2305
2306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2307 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2308 if (pVCpu->iem.s.uVexLength == 0)
2309 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2310 IEM_GET_MODRM_REG(pVCpu, bRm));
2311 else
2312 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2313 IEM_GET_MODRM_REG(pVCpu, bRm));
2314 IEM_MC_ADVANCE_RIP_AND_FINISH();
2315 IEM_MC_END();
2316 }
2317 else
2318 {
2319 /*
2320 * Register, memory.
2321 */
2322 if (pVCpu->iem.s.uVexLength == 0)
2323 {
2324 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2326 IEM_MC_LOCAL(RTUINT128U, uSrc);
2327
2328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2332
2333 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2334 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2335
2336 IEM_MC_ADVANCE_RIP_AND_FINISH();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2343 IEM_MC_LOCAL(RTUINT256U, uSrc);
2344
2345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2346 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2349
2350 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2351 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2352
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 }
2356 }
2357}
2358
2359
2360/**
2361 * @opmnemonic udvexf30f29
2362 * @opcode 0x29
2363 * @oppfx 0xf3
2364 * @opunused vex.modrm
2365 * @opcpuid avx
2366 * @optest ->
2367 * @opdone
2368 */
2369
2370/**
2371 * @opmnemonic udvexf20f29
2372 * @opcode 0x29
2373 * @oppfx 0xf2
2374 * @opunused vex.modrm
2375 * @opcpuid avx
2376 * @optest ->
2377 * @opdone
2378 */
2379
2380
2381/** Opcode VEX.0F 0x2a - invalid */
2382/** Opcode VEX.66.0F 0x2a - invalid */
2383
2384
2385/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2386FNIEMOP_DEF(iemOp_vcvtsi2ss_Vss_Hss_Ey)
2387{
2388 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SS, vcvtsi2ss, Vps, Hps, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2389 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
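    /* VEX.W=1 selects a 64-bit integer source (W is ignored outside 64-bit mode, see above); otherwise the source is a 32-bit integer. */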
2391 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2392 {
2393 if (IEM_IS_MODRM_REG_MODE(bRm))
2394 {
2395 /* XMM, greg64 */
2396 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2397 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2398 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2399 IEM_MC_PREPARE_AVX_USAGE();
2400
2401 IEM_MC_LOCAL(X86XMMREG, uDst);
2402 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2403 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2404 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2405 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2406 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2407 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2408 puDst, puSrc1, pi64Src2);
2409 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2410 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2411 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2412 IEM_MC_ADVANCE_RIP_AND_FINISH();
2413 IEM_MC_END();
2414 }
2415 else
2416 {
2417 /* XMM, [mem64] */
2418 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2421 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2422 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2423 IEM_MC_PREPARE_AVX_USAGE();
2424
2425 IEM_MC_LOCAL(X86XMMREG, uDst);
2426 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2427 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2428 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2429 IEM_MC_LOCAL(int64_t, i64Src2);
2430 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2431 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2432 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2433 puDst, puSrc1, pi64Src2);
2434 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2435 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2436 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2437 IEM_MC_ADVANCE_RIP_AND_FINISH();
2438 IEM_MC_END();
2439 }
2440 }
2441 else
2442 {
2443 if (IEM_IS_MODRM_REG_MODE(bRm))
2444 {
2445 /* XMM, greg32 */
2446 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2447 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2448 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2449 IEM_MC_PREPARE_AVX_USAGE();
2450
2451 IEM_MC_LOCAL(X86XMMREG, uDst);
2452 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2453 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2454 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2455 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2456 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2457 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2458 puDst, puSrc1, pi32Src2);
2459 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2460 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2461 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2462 IEM_MC_ADVANCE_RIP_AND_FINISH();
2463 IEM_MC_END();
2464 }
2465 else
2466 {
2467 /* XMM, [mem32] */
2468 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2471 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2472 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2473 IEM_MC_PREPARE_AVX_USAGE();
2474
2475 IEM_MC_LOCAL(X86XMMREG, uDst);
2476 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2477 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2478 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2479 IEM_MC_LOCAL(int32_t, i32Src2);
2480 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2481 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2482 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2483 puDst, puSrc1, pi32Src2);
2484 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2485 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2486 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2487 IEM_MC_ADVANCE_RIP_AND_FINISH();
2488 IEM_MC_END();
2489 }
2490 }
2491}
2492
2493
2494/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2495FNIEMOP_DEF(iemOp_vcvtsi2sd_Vsd_Hsd_Ey)
2496{
2497 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SD, vcvtsi2sd, Vpd, Hpd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2498 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
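    /* As for vcvtsi2ss: VEX.W selects a 64-bit or a 32-bit integer source. */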
2500 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2501 {
2502 if (IEM_IS_MODRM_REG_MODE(bRm))
2503 {
2504 /* XMM, greg64 */
2505 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2506 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2507 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2508 IEM_MC_PREPARE_AVX_USAGE();
2509
2510 IEM_MC_LOCAL(X86XMMREG, uDst);
2511 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2512 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2513 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2514 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2515 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2516 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2517 puDst, puSrc1, pi64Src2);
2518 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2519 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2520 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2521 IEM_MC_ADVANCE_RIP_AND_FINISH();
2522 IEM_MC_END();
2523 }
2524 else
2525 {
2526 /* XMM, [mem64] */
2527 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2530 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2532 IEM_MC_PREPARE_AVX_USAGE();
2533
2534 IEM_MC_LOCAL(X86XMMREG, uDst);
2535 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2536 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2537 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2538 IEM_MC_LOCAL(int64_t, i64Src2);
2539 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2540 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2541 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2542 puDst, puSrc1, pi64Src2);
2543 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2544 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2545 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2546 IEM_MC_ADVANCE_RIP_AND_FINISH();
2547 IEM_MC_END();
2548 }
2549 }
2550 else
2551 {
2552 if (IEM_IS_MODRM_REG_MODE(bRm))
2553 {
2554 /* XMM, greg32 */
2555 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2556 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2558 IEM_MC_PREPARE_AVX_USAGE();
2559
2560 IEM_MC_LOCAL(X86XMMREG, uDst);
2561 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2562 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2563 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2564 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2565 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2566 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2567 puDst, puSrc1, pi32Src2);
2568 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2569 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2570 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2571 IEM_MC_ADVANCE_RIP_AND_FINISH();
2572 IEM_MC_END();
2573 }
2574 else
2575 {
2576 /* XMM, [mem32] */
2577 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2580 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2581 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2582 IEM_MC_PREPARE_AVX_USAGE();
2583
2584 IEM_MC_LOCAL(X86XMMREG, uDst);
2585 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2586 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2587 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2588 IEM_MC_LOCAL(int32_t, i32Src2);
2589 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2590 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2591 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2592 puDst, puSrc1, pi32Src2);
2593 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2594 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2595 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2596 IEM_MC_ADVANCE_RIP_AND_FINISH();
2597 IEM_MC_END();
2598 }
2599 }
2600}
2601
2602
2603/**
2604 * @opcode 0x2b
2605 * @opcodesub !11 mr/reg
2606 * @oppfx none
2607 * @opcpuid avx
2608 * @opgroup og_avx_cachect
2609 * @opxcpttype 1
2610 * @optest op1=1 op2=2 -> op1=2
2611 * @optest op1=0 op2=-42 -> op1=-42
2612 * @note Identical implementation to vmovntpd
2613 */
2614FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2615{
2616 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2617 Assert(pVCpu->iem.s.uVexLength <= 1);
2618 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2619 if (IEM_IS_MODRM_MEM_MODE(bRm))
2620 {
2621 /*
2622 * memory, register.
2623 */
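        /* The non-temporal hint is not modelled here; the store is carried out as an ordinary aligned store. */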
2624 if (pVCpu->iem.s.uVexLength == 0)
2625 {
2626 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2627 IEM_MC_LOCAL(RTUINT128U, uSrc);
2628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2629
2630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2631 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2632 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2633 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2634
2635 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2636 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2637
2638 IEM_MC_ADVANCE_RIP_AND_FINISH();
2639 IEM_MC_END();
2640 }
2641 else
2642 {
2643 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2644 IEM_MC_LOCAL(RTUINT256U, uSrc);
2645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2646
2647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2648 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2649 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2650 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2651
2652 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2653 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658 }
2659 /* The register, register encoding is invalid. */
2660 else
2661 IEMOP_RAISE_INVALID_OPCODE_RET();
2662}
2663
2664/**
2665 * @opcode 0x2b
2666 * @opcodesub !11 mr/reg
2667 * @oppfx 0x66
2668 * @opcpuid avx
2669 * @opgroup og_avx_cachect
2670 * @opxcpttype 1
2671 * @optest op1=1 op2=2 -> op1=2
2672 * @optest op1=0 op2=-42 -> op1=-42
2673 * @note Identical implementation to vmovntps
2674 */
2675FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2676{
2677 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2678 Assert(pVCpu->iem.s.uVexLength <= 1);
2679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2680 if (IEM_IS_MODRM_MEM_MODE(bRm))
2681 {
2682 /*
2683 * memory, register.
2684 */
2685 if (pVCpu->iem.s.uVexLength == 0)
2686 {
2687 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2688 IEM_MC_LOCAL(RTUINT128U, uSrc);
2689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2690
2691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2692 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2694 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2695
2696 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2697 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2698
2699 IEM_MC_ADVANCE_RIP_AND_FINISH();
2700 IEM_MC_END();
2701 }
2702 else
2703 {
2704 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2705 IEM_MC_LOCAL(RTUINT256U, uSrc);
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2707
2708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2709 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2710 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2712
2713 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2714 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2715
2716 IEM_MC_ADVANCE_RIP_AND_FINISH();
2717 IEM_MC_END();
2718 }
2719 }
2720 /* The register, register encoding is invalid. */
2721 else
2722 IEMOP_RAISE_INVALID_OPCODE_RET();
2723}
2724
2725/**
2726 * @opmnemonic udvexf30f2b
2727 * @opcode 0x2b
2728 * @oppfx 0xf3
2729 * @opunused vex.modrm
2730 * @opcpuid avx
2731 * @optest ->
2732 * @opdone
2733 */
2734
2735/**
2736 * @opmnemonic udvexf20f2b
2737 * @opcode 0x2b
2738 * @oppfx 0xf2
2739 * @opunused vex.modrm
2740 * @opcpuid avx
2741 * @optest ->
2742 * @opdone
2743 */
2744
2745
2746/* Opcode VEX.0F 0x2c - invalid */
2747/* Opcode VEX.66.0F 0x2c - invalid */
2748
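/**
 * Common body for vcvtss2si / vcvttss2si (Gy, Wss): VEX.W selects a 64-bit
 * or a 32-bit destination GPR, while the source is always a 32-bit float in
 * an XMM register or in memory.
 */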
2749#define IEMOP_VCVTXSS2SI_Gy_Wss_BODY(a_Instr) \
2750 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2752 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2753 { \
2754 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2755 { \
2756 /* greg64, XMM */ \
2757 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2758 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2759 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2760 IEM_MC_PREPARE_AVX_USAGE(); \
2761 IEM_MC_LOCAL( int64_t, i64Dst); \
2762 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2763 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2764 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2765 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2766 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2767 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2768 pi64Dst, pr32Src); \
2769 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2770 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2771 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2772 IEM_MC_END(); \
2773 } \
2774 else \
2775 { \
2776            /* greg64, [mem32] */ \
2777 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2779 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2780 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2781 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2782 IEM_MC_PREPARE_AVX_USAGE(); \
2783 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2784 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2785 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2786 IEM_MC_LOCAL( int64_t, i64Dst); \
2787 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2788 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2789 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2790 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2791 pi64Dst, pr32Src); \
2792 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2793 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2795 IEM_MC_END(); \
2796 } \
2797 } \
2798 else \
2799 { \
2800 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2801 { \
2802 /* greg, XMM */ \
2803 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2804 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2805 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2806 IEM_MC_PREPARE_AVX_USAGE(); \
2807 IEM_MC_LOCAL( int32_t, i32Dst); \
2808 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2809 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2810 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2811 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2812 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2813 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2814 pi32Dst, pr32Src); \
2815 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2816 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2817 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2818 IEM_MC_END(); \
2819 } \
2820 else \
2821 { \
2822 /* greg, [mem] */ \
2823 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2826 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2827 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2828 IEM_MC_PREPARE_AVX_USAGE(); \
2829 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2830 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2831 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2832 IEM_MC_LOCAL( int32_t, i32Dst); \
2833 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2834 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2835 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2836 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2837 pi32Dst, pr32Src); \
2838 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2839 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2840 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2841 IEM_MC_END(); \
2842 } \
2843 } \
2844 (void)0
2845
2846
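/**
 * Common body for vcvtsd2si / vcvttsd2si (Gy, Wsd): same structure as the
 * vcvtss2si body above, but with a 64-bit double-precision source.
 */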
2847#define IEMOP_VCVTXSD2SI_Gy_Wsd_BODY(a_Instr) \
2848 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2850 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2851 { \
2852 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2853 { \
2854 /* greg64, XMM */ \
2855 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2856 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2857 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2858 IEM_MC_PREPARE_AVX_USAGE(); \
2859 IEM_MC_LOCAL( int64_t, i64Dst); \
2860 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2861 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2862 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2863 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2864 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2865 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2866 pi64Dst, pr64Src); \
2867 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2868 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2869 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2870 IEM_MC_END(); \
2871 } \
2872 else \
2873 { \
2874 /* greg64, [mem64] */ \
2875 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2876 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2878 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2879 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2880 IEM_MC_PREPARE_AVX_USAGE(); \
2881 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2882 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2883 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2884 IEM_MC_LOCAL( int64_t, i64Dst); \
2885 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2886 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2887 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2888 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2889 pi64Dst, pr64Src); \
2890 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2891 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2892 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2893 IEM_MC_END(); \
2894 } \
2895 } \
2896 else \
2897 { \
2898 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2899 { \
2900 /* greg, XMM */ \
2901 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2902 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2903 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2904 IEM_MC_PREPARE_AVX_USAGE(); \
2905 IEM_MC_LOCAL( int32_t, i32Dst); \
2906 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2907 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2908 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2909 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2910 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2911 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2912 pi32Dst, pr64Src); \
2913 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2914 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2915 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2916 IEM_MC_END(); \
2917 } \
2918 else \
2919 { \
2920 /* greg, [mem] */ \
2921 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2924 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2925 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2926 IEM_MC_PREPARE_AVX_USAGE(); \
2927 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2928 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2929 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2930 IEM_MC_LOCAL( int32_t, i32Dst); \
2931 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2932 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2933 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2934 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2935 pi32Dst, pr64Src); \
2936 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
2937 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2938 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2939 IEM_MC_END(); \
2940 } \
2941 } \
2942 (void)0
2943
2944
2945/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2946FNIEMOP_DEF(iemOp_vcvttss2si_Gy_Wss)
2947{
2948 IEMOP_MNEMONIC2(VEX_RM, VCVTTSS2SI, vcvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2949 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvttss2si);
2950}
2951
2952
2953/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2954FNIEMOP_DEF(iemOp_vcvttsd2si_Gy_Wsd)
2955{
2956    IEMOP_MNEMONIC2(VEX_RM, VCVTTSD2SI, vcvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2957    IEMOP_VCVTXSD2SI_Gy_Wsd_BODY(        vcvttsd2si);
2958}
2959
2960
2961/* Opcode VEX.0F 0x2d - invalid */
2962/* Opcode VEX.66.0F 0x2d - invalid */
2963
2964
2965/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2966FNIEMOP_DEF(iemOp_vcvtss2si_Gy_Wss)
2967{
2968 IEMOP_MNEMONIC2(VEX_RM, VCVTSS2SI, vcvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2969 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvtss2si);
2970}
2971
2972
2973/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2974FNIEMOP_DEF(iemOp_vcvtsd2si_Gy_Wsd)
2975{
2976    IEMOP_MNEMONIC2(VEX_RM, VCVTSD2SI, vcvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2977    IEMOP_VCVTXSD2SI_Gy_Wsd_BODY(       vcvtsd2si);
2978}
2979
2980
2981
2982/**
2983 * @opcode 0x2e
2984 * @oppfx none
2985 * @opflmodify cf,pf,af,zf,sf,of
2986 * @opflclear af,sf,of
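 * @note  Unordered inputs (either operand NaN) set ZF=PF=CF=1; ordered
 *        compares set at most one of ZF (equal) or CF (less).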
2987 */
2988FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2989{
2990 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2992 if (IEM_IS_MODRM_REG_MODE(bRm))
2993 {
2994 /*
2995 * Register, register.
2996 */
2997 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2998 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2999 IEM_MC_LOCAL(uint32_t, fEFlags);
3000 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3001 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3002 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3003 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3004 IEM_MC_PREPARE_AVX_USAGE();
3005 IEM_MC_FETCH_EFLAGS(fEFlags);
3006 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3007 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
3008 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
3009 pEFlags, uSrc1, uSrc2);
3010 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3011 IEM_MC_COMMIT_EFLAGS(fEFlags);
3012
3013 IEM_MC_ADVANCE_RIP_AND_FINISH();
3014 IEM_MC_END();
3015 }
3016 else
3017 {
3018 /*
3019 * Register, memory.
3020 */
3021 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3022 IEM_MC_LOCAL(uint32_t, fEFlags);
3023 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3024 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3025 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3027
3028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3029 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3031 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3032
3033 IEM_MC_PREPARE_AVX_USAGE();
3034 IEM_MC_FETCH_EFLAGS(fEFlags);
3035 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3036 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
3037 pEFlags, uSrc1, uSrc2);
3038 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3039 IEM_MC_COMMIT_EFLAGS(fEFlags);
3040
3041 IEM_MC_ADVANCE_RIP_AND_FINISH();
3042 IEM_MC_END();
3043 }
3044}
3045
3046
3047/**
3048 * @opcode 0x2e
3049 * @oppfx 0x66
3050 * @opflmodify cf,pf,af,zf,sf,of
3051 * @opflclear af,sf,of
3052 */
3053FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
3054{
3055 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3057 if (IEM_IS_MODRM_REG_MODE(bRm))
3058 {
3059 /*
3060 * Register, register.
3061 */
3062 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3063 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3064 IEM_MC_LOCAL(uint32_t, fEFlags);
3065 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3066 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3067 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3068 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3069 IEM_MC_PREPARE_AVX_USAGE();
3070 IEM_MC_FETCH_EFLAGS(fEFlags);
3071 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3072 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3073 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3074 pEFlags, uSrc1, uSrc2);
3075 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3076 IEM_MC_COMMIT_EFLAGS(fEFlags);
3077
3078 IEM_MC_ADVANCE_RIP_AND_FINISH();
3079 IEM_MC_END();
3080 }
3081 else
3082 {
3083 /*
3084 * Register, memory.
3085 */
3086 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3087 IEM_MC_LOCAL(uint32_t, fEFlags);
3088 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3089 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3090 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3092
3093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3094 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3095 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3096 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3097
3098 IEM_MC_PREPARE_AVX_USAGE();
3099 IEM_MC_FETCH_EFLAGS(fEFlags);
3100 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3101 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3102 pEFlags, uSrc1, uSrc2);
3103 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3104 IEM_MC_COMMIT_EFLAGS(fEFlags);
3105
3106 IEM_MC_ADVANCE_RIP_AND_FINISH();
3107 IEM_MC_END();
3108 }
3109}
3110
3111
3112/* Opcode VEX.F3.0F 0x2e - invalid */
3113/* Opcode VEX.F2.0F 0x2e - invalid */
3114
3115/**
3116 * @opcode 0x2f
3117 * @oppfx none
3118 * @opflmodify cf,pf,af,zf,sf,of
3119 * @opflclear af,sf,of
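 * @note  Same flag results as vucomiss, but vcomiss also signals an
 *        invalid-operation exception (#IA) on QNaN operands.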
3120 */
3121FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
3122{
3123 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 if (IEM_IS_MODRM_REG_MODE(bRm))
3126 {
3127 /*
3128 * Register, register.
3129 */
3130 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3131 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3132 IEM_MC_LOCAL(uint32_t, fEFlags);
3133 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3134 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3135 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3136 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3137 IEM_MC_PREPARE_AVX_USAGE();
3138 IEM_MC_FETCH_EFLAGS(fEFlags);
3139 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3140 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
3141 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3142 pEFlags, uSrc1, uSrc2);
3143 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3144 IEM_MC_COMMIT_EFLAGS(fEFlags);
3145
3146 IEM_MC_ADVANCE_RIP_AND_FINISH();
3147 IEM_MC_END();
3148 }
3149 else
3150 {
3151 /*
3152 * Register, memory.
3153 */
3154 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3155 IEM_MC_LOCAL(uint32_t, fEFlags);
3156 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3157 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3158 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3160
3161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3162 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3163 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3164 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3165
3166 IEM_MC_PREPARE_AVX_USAGE();
3167 IEM_MC_FETCH_EFLAGS(fEFlags);
3168 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3169 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3170 pEFlags, uSrc1, uSrc2);
3171 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3172 IEM_MC_COMMIT_EFLAGS(fEFlags);
3173
3174 IEM_MC_ADVANCE_RIP_AND_FINISH();
3175 IEM_MC_END();
3176 }
3177}
3178
3179
3180/**
3181 * @opcode 0x2f
3182 * @oppfx 0x66
3183 * @opflmodify cf,pf,af,zf,sf,of
3184 * @opflclear af,sf,of
3185 */
3186FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
3187{
3188 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3189 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3190 if (IEM_IS_MODRM_REG_MODE(bRm))
3191 {
3192 /*
3193 * Register, register.
3194 */
3195 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3196 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3197 IEM_MC_LOCAL(uint32_t, fEFlags);
3198 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3199 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3200 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3201 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3202 IEM_MC_PREPARE_AVX_USAGE();
3203 IEM_MC_FETCH_EFLAGS(fEFlags);
3204 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3205 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3206 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3207 pEFlags, uSrc1, uSrc2);
3208 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3209 IEM_MC_COMMIT_EFLAGS(fEFlags);
3210
3211 IEM_MC_ADVANCE_RIP_AND_FINISH();
3212 IEM_MC_END();
3213 }
3214 else
3215 {
3216 /*
3217 * Register, memory.
3218 */
3219 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3220 IEM_MC_LOCAL(uint32_t, fEFlags);
3221 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3222 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3223 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3225
3226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3227 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3228 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3229 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3230
3231 IEM_MC_PREPARE_AVX_USAGE();
3232 IEM_MC_FETCH_EFLAGS(fEFlags);
3233 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3234 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3235 pEFlags, uSrc1, uSrc2);
3236 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3237 IEM_MC_COMMIT_EFLAGS(fEFlags);
3238
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242}
3243
3244
3245/* Opcode VEX.F3.0F 0x2f - invalid */
3246/* Opcode VEX.F2.0F 0x2f - invalid */
3247
3248/* Opcode VEX.0F 0x30 - invalid */
3249/* Opcode VEX.0F 0x31 - invalid */
3250/* Opcode VEX.0F 0x32 - invalid */
3251/* Opcode VEX.0F 0x33 - invalid */
3252/* Opcode VEX.0F 0x34 - invalid */
3253/* Opcode VEX.0F 0x35 - invalid */
3254/* Opcode VEX.0F 0x36 - invalid */
3255/* Opcode VEX.0F 0x37 - invalid */
3256/* Opcode VEX.0F 0x38 - invalid */
3257/* Opcode VEX.0F 0x39 - invalid */
3258/* Opcode VEX.0F 0x3a - invalid */
3259/* Opcode VEX.0F 0x3b - invalid */
3260/* Opcode VEX.0F 0x3c - invalid */
3261/* Opcode VEX.0F 0x3d - invalid */
3262/* Opcode VEX.0F 0x3e - invalid */
3263/* Opcode VEX.0F 0x3f - invalid */
3264/* Opcode VEX.0F 0x40 - invalid */
3265/* Opcode VEX.0F 0x41 - invalid */
3266/* Opcode VEX.0F 0x42 - invalid */
3267/* Opcode VEX.0F 0x43 - invalid */
3268/* Opcode VEX.0F 0x44 - invalid */
3269/* Opcode VEX.0F 0x45 - invalid */
3270/* Opcode VEX.0F 0x46 - invalid */
3271/* Opcode VEX.0F 0x47 - invalid */
3272/* Opcode VEX.0F 0x48 - invalid */
3273/* Opcode VEX.0F 0x49 - invalid */
3274/* Opcode VEX.0F 0x4a - invalid */
3275/* Opcode VEX.0F 0x4b - invalid */
3276/* Opcode VEX.0F 0x4c - invalid */
3277/* Opcode VEX.0F 0x4d - invalid */
3278/* Opcode VEX.0F 0x4e - invalid */
3279/* Opcode VEX.0F 0x4f - invalid */
3280
3281
3282/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
3283FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
3284{
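    /* Gathers the sign bit of each packed single into bits 3:0 (VEX.128) or
       7:0 (VEX.256) of the destination GPR; the remaining GPR bits are zeroed. */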
3285 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3287 if (IEM_IS_MODRM_REG_MODE(bRm))
3288 {
3289 /*
3290 * Register, register.
3291 */
3292 if (pVCpu->iem.s.uVexLength == 0)
3293 {
3294 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3295 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3296 IEM_MC_LOCAL(uint8_t, u8Dst);
3297 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3298 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3299 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3300 IEM_MC_PREPARE_AVX_USAGE();
3301 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3302 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
3303 pu8Dst, puSrc);
3304 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3305 IEM_MC_ADVANCE_RIP_AND_FINISH();
3306 IEM_MC_END();
3307 }
3308 else
3309 {
3310 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3311 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3312 IEM_MC_LOCAL(uint8_t, u8Dst);
3313 IEM_MC_LOCAL(RTUINT256U, uSrc);
3314 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3315 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3316
3317 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3318 IEM_MC_PREPARE_AVX_USAGE();
3319 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3320 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
3321 pu8Dst, puSrc);
3322 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3323 IEM_MC_ADVANCE_RIP_AND_FINISH();
3324 IEM_MC_END();
3325 }
3326 }
3327 /* No memory operand. */
3328 else
3329 IEMOP_RAISE_INVALID_OPCODE_RET();
3330}
3331
3332
3333/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
3334FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
3335{
3336 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3337 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3338 if (IEM_IS_MODRM_REG_MODE(bRm))
3339 {
3340 /*
3341 * Register, register.
3342 */
3343 if (pVCpu->iem.s.uVexLength == 0)
3344 {
3345 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3346 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3347 IEM_MC_LOCAL(uint8_t, u8Dst);
3348 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3349 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3350 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3351 IEM_MC_PREPARE_AVX_USAGE();
3352 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3353 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
3354 pu8Dst, puSrc);
3355 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3356 IEM_MC_ADVANCE_RIP_AND_FINISH();
3357 IEM_MC_END();
3358 }
3359 else
3360 {
3361 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3362 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3363 IEM_MC_LOCAL(uint8_t, u8Dst);
3364 IEM_MC_LOCAL(RTUINT256U, uSrc);
3365 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3366 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3367
3368 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3369 IEM_MC_PREPARE_AVX_USAGE();
3370 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3371 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
3372 pu8Dst, puSrc);
3373 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3374 IEM_MC_ADVANCE_RIP_AND_FINISH();
3375 IEM_MC_END();
3376 }
3377 }
3378 /* No memory operand. */
3379 else
3380 IEMOP_RAISE_INVALID_OPCODE_RET();
3381}
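

/*
 * Note: Both movmask workers above only gather the per-lane sign bits into a
 * general register.  Illustrative sketch of the 128-bit single-precision case
 * (hypothetical helper name, kept commented out like the SSE worker further
 * down in this file; not part of the build):
 */
//static uint8_t iemIllustrateMovMskPsU128(PCRTUINT128U puSrc)
//{
//    uint8_t fMask = 0;
//    for (unsigned iLane = 0; iLane < 4; iLane++)    /* MSB of each 32-bit lane. */
//        fMask |= (uint8_t)((puSrc->au32[iLane] >> 31) << iLane);
//    return fMask; /* The callers store this zero-extended into the Gd register. */
//}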
3382
3383
3384/* Opcode VEX.F3.0F 0x50 - invalid */
3385/* Opcode VEX.F2.0F 0x50 - invalid */
3386
3387/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
3388FNIEMOP_DEF(iemOp_vsqrtps_Vps_Wps)
3389{
3390 IEMOP_MNEMONIC2(VEX_RM, VSQRTPS, vsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3391 IEMOPMEDIAF2_INIT_VARS( vsqrtps);
3392 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3393}
3394
3395
3396/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
3397FNIEMOP_DEF(iemOp_vsqrtpd_Vpd_Wpd)
3398{
3399 IEMOP_MNEMONIC2(VEX_RM, VSQRTPD, vsqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3400 IEMOPMEDIAF2_INIT_VARS( vsqrtpd);
3401 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3402}
3403
3404
3405/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
3406FNIEMOP_DEF(iemOp_vsqrtss_Vss_Hss_Wss)
3407{
3408 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSS, vsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3409 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3410 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback));
3411}
3412
3413
3414/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
3415FNIEMOP_DEF(iemOp_vsqrtsd_Vsd_Hsd_Wsd)
3416{
3417 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSD, vsqrtsd, Vps, Hps, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3418 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3419 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback));
3420}
3421
3422
3423/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
3424FNIEMOP_DEF(iemOp_vrsqrtps_Vps_Wps)
3425{
3426 IEMOP_MNEMONIC2(VEX_RM, VRSQRTPS, vrsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3427 IEMOPMEDIAF2_INIT_VARS( vrsqrtps);
3428 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3429}
3430
3431
3432/* Opcode VEX.66.0F 0x52 - invalid */
3433
3434
3435/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
3436FNIEMOP_DEF(iemOp_vrsqrtss_Vss_Hss_Wss)
3437{
3438 IEMOP_MNEMONIC3(VEX_RVM, VRSQRTSS, vrsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3439 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3440 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback));
3441}
3442
3443
3444/* Opcode VEX.F2.0F 0x52 - invalid */
3445
3446
3447/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
3448FNIEMOP_DEF(iemOp_vrcpps_Vps_Wps)
3449{
3450 IEMOP_MNEMONIC2(VEX_RM, VRCPPS, vrcpps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3451 IEMOPMEDIAF2_INIT_VARS( vrcpps);
3452 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3453}
3454
3455
3456/* Opcode VEX.66.0F 0x53 - invalid */
3457
3458
3459/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
3460FNIEMOP_DEF(iemOp_vrcpss_Vss_Hss_Wss)
3461{
3462 IEMOP_MNEMONIC3(VEX_RVM, VRCPSS, vrcpss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3463 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3464 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback));
3465}
3466
3467
3468/* Opcode VEX.F2.0F 0x53 - invalid */
3469
3470
3471/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
3472FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
3473{
3474 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3475 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3476 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3477}
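

/*
 * Note: The VEX.0F/VEX.66.0F 0x54..0x57 floating-point logical forms (vandps
 * above and the vandpd/vandnps/vandnpd/vorps/vorpd/vxorps/vxorpd forms below)
 * all dispatch to the integer vpand/vpandn/vpor/vpxor workers, since a bitwise
 * operation is the same no matter how the vector lanes are typed.
 */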
3478
3479
3480/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
3481FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
3482{
3483 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3484 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3485 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3486}
3487
3488
3489/* Opcode VEX.F3.0F 0x54 - invalid */
3490/* Opcode VEX.F2.0F 0x54 - invalid */
3491
3492
3493/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
3494FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
3495{
3496 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3497 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3498 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3499}
3500
3501
3502/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
3503FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
3504{
3505 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3506 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3507 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3508}
3509
3510
3511/* Opcode VEX.F3.0F 0x55 - invalid */
3512/* Opcode VEX.F2.0F 0x55 - invalid */
3513
3514/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
3515FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
3516{
3517 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3518 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3519 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3520}
3521
3522
3523/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
3524FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
3525{
3526 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3527 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3528 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3529}
3530
3531
3532/* Opcode VEX.F3.0F 0x56 - invalid */
3533/* Opcode VEX.F2.0F 0x56 - invalid */
3534
3535
3536/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
3537FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
3538{
3539 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3540 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3541 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3542}
3543
3544
3545/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
3546FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
3547{
3548 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3549 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3550 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3551}
3552
3553
3554/* Opcode VEX.F3.0F 0x57 - invalid */
3555/* Opcode VEX.F2.0F 0x57 - invalid */
3556
3557
3558/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
3559FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
3560{
3561 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3562 IEMOPMEDIAF3_INIT_VARS( vaddps);
3563 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3564}
3565
3566
3567/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
3568FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
3569{
3570 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3571 IEMOPMEDIAF3_INIT_VARS( vaddpd);
3572 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3573}
3574
3575
3576/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
3577FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
3578{
3579 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3580 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3581 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
3582}
3583
3584
3585/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
3586FNIEMOP_DEF(iemOp_vaddsd_Vsd_Hsd_Wsd)
3587{
3588 IEMOP_MNEMONIC3(VEX_RVM, VADDSD, vaddsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3589 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3590 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback));
3591}
3592
3593
3594/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
3595FNIEMOP_DEF(iemOp_vmulps_Vps_Hps_Wps)
3596{
3597 IEMOP_MNEMONIC3(VEX_RVM, VMULPS, vmulps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3598 IEMOPMEDIAF3_INIT_VARS( vmulps);
3599 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3600}
3601
3602
3603/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
3604FNIEMOP_DEF(iemOp_vmulpd_Vpd_Hpd_Wpd)
3605{
3606 IEMOP_MNEMONIC3(VEX_RVM, VMULPD, vmulpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3607 IEMOPMEDIAF3_INIT_VARS( vmulpd);
3608 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3609}
3610
3611
3612/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
3613FNIEMOP_DEF(iemOp_vmulss_Vss_Hss_Wss)
3614{
3615 IEMOP_MNEMONIC3(VEX_RVM, VMULSS, vmulss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3616 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3617 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback));
3618}
3619
3620
3621/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
3622FNIEMOP_DEF(iemOp_vmulsd_Vsd_Hsd_Wsd)
3623{
3624 IEMOP_MNEMONIC3(VEX_RVM, VMULSD, vmulsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3625 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3626 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback));
3627}
3628
3629
3630/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
3631FNIEMOP_DEF(iemOp_vcvtps2pd_Vpd_Wps)
3632{
3633 IEMOP_MNEMONIC2(VEX_RM, VCVTPS2PD, vcvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3635 if (IEM_IS_MODRM_REG_MODE(bRm))
3636 {
3637 /*
3638 * Register, register.
3639 */
3640 if (pVCpu->iem.s.uVexLength)
3641 {
3642 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3643 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3644 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3645 IEM_MC_PREPARE_AVX_USAGE();
3646
3647 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
3648 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3649 IEM_MC_LOCAL( X86YMMREG, uDst);
3650 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
3651 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3652 iemAImpl_vcvtps2pd_u256_u128,
3653 iemAImpl_vcvtps2pd_u256_u128_fallback),
3654 puDst, puSrc);
3655 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3656 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3657 IEM_MC_ADVANCE_RIP_AND_FINISH();
3658 IEM_MC_END();
3659 }
3660 else
3661 {
3662 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3663 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3664 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3665 IEM_MC_PREPARE_AVX_USAGE();
3666
3667 IEM_MC_ARG( const uint64_t *, pu64Src, 1);
3668 IEM_MC_REF_XREG_U64_CONST( pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3669 IEM_MC_LOCAL( X86XMMREG, uDst);
3670 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3671 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3672 iemAImpl_vcvtps2pd_u128_u64,
3673 iemAImpl_vcvtps2pd_u128_u64_fallback),
3674 puDst, pu64Src);
3675 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3676 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3677 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3678 IEM_MC_ADVANCE_RIP_AND_FINISH();
3679 IEM_MC_END();
3680 }
3681 }
3682 else
3683 {
3684 /*
3685 * Register, memory.
3686 */
3687 if (pVCpu->iem.s.uVexLength)
3688 {
3689 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3692 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3694 IEM_MC_PREPARE_AVX_USAGE();
3695
3696 IEM_MC_LOCAL(X86XMMREG, uSrc);
3697 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
3698 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3699 IEM_MC_LOCAL(X86YMMREG, uDst);
3700 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
3701 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3702 iemAImpl_vcvtps2pd_u256_u128,
3703 iemAImpl_vcvtps2pd_u256_u128_fallback),
3704 puDst, puSrc);
3705 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3706 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3707 IEM_MC_ADVANCE_RIP_AND_FINISH();
3708 IEM_MC_END();
3709 }
3710 else
3711 {
3712 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3715 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3716 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3717 IEM_MC_PREPARE_AVX_USAGE();
3718
3719 IEM_MC_LOCAL( uint64_t, u64Src);
3720 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
3721 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3722 IEM_MC_LOCAL( X86XMMREG, uDst);
3723 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3724 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3725 iemAImpl_vcvtps2pd_u128_u64,
3726 iemAImpl_vcvtps2pd_u128_u64_fallback),
3727 puDst, pu64Src);
3728 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3729 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3730 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3731 IEM_MC_ADVANCE_RIP_AND_FINISH();
3732 IEM_MC_END();
3733 }
3734 }
3735}
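

/*
 * Informal reference for the conversion done above (illustrative sketch only;
 * plain C stand-in with a hypothetical name, ignoring the MXCSR rounding and
 * exception handling the real helpers perform; not part of the build):
 */
//static void iemIllustrateCvtPs2PdU128(double *pardDst, float const *parSrc)
//{
//    /* VEX.128 form: the two singles in the low source qword widen to two
//       doubles; the VEX.256 form does the same for four singles. */
//    pardDst[0] = (double)parSrc[0];
//    pardDst[1] = (double)parSrc[1];
//}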
3736
3737
3738/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
3739FNIEMOP_DEF(iemOp_vcvtpd2ps_Vps_Wpd)
3740{
3741 IEMOP_MNEMONIC2(VEX_RM, VCVTPD2PS, vcvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3743 if (IEM_IS_MODRM_REG_MODE(bRm))
3744 {
3745 /*
3746 * Register, register.
3747 */
3748 if (pVCpu->iem.s.uVexLength)
3749 {
3750 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3751 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3752 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3753 IEM_MC_PREPARE_AVX_USAGE();
3754
3755 IEM_MC_LOCAL( X86YMMREG, uSrc);
3756 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3757 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
3758 IEM_MC_LOCAL( X86XMMREG, uDst);
3759 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3760 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3761 iemAImpl_vcvtpd2ps_u128_u256,
3762 iemAImpl_vcvtpd2ps_u128_u256_fallback),
3763 puDst, puSrc);
3764 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3765 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3766 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
3767 IEM_MC_ADVANCE_RIP_AND_FINISH();
3768 IEM_MC_END();
3769 }
3770 else
3771 {
3772 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3773 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3774 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3775 IEM_MC_PREPARE_AVX_USAGE();
3776
3777 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
3778 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3779 IEM_MC_LOCAL( X86XMMREG, uDst);
3780 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3781 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3782 iemAImpl_vcvtpd2ps_u128_u128,
3783 iemAImpl_vcvtpd2ps_u128_u128_fallback),
3784 puDst, puSrc);
3785 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3786 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3787 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
3788 IEM_MC_ADVANCE_RIP_AND_FINISH();
3789 IEM_MC_END();
3790 }
3791 }
3792 else
3793 {
3794 /*
3795 * Register, memory.
3796 */
3797 if (pVCpu->iem.s.uVexLength)
3798 {
3799 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3802 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3803 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3804 IEM_MC_PREPARE_AVX_USAGE();
3805
3806 IEM_MC_LOCAL( X86YMMREG, uSrc);
3807 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
3808 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3809 IEM_MC_LOCAL( X86XMMREG, uDst);
3810 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3811 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3812 iemAImpl_vcvtpd2ps_u128_u256,
3813 iemAImpl_vcvtpd2ps_u128_u256_fallback),
3814 puDst, puSrc);
3815 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3816 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3817 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
3818 IEM_MC_ADVANCE_RIP_AND_FINISH();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3826 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3827 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3828 IEM_MC_PREPARE_AVX_USAGE();
3829
3830 IEM_MC_LOCAL(X86XMMREG, uSrc);
3831 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
3832 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3833 IEM_MC_LOCAL( X86XMMREG, uDst);
3834 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3835 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3836 iemAImpl_vcvtpd2ps_u128_u128,
3837 iemAImpl_vcvtpd2ps_u128_u128_fallback),
3838 puDst, puSrc);
3839 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3840 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3841 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3842 IEM_MC_ADVANCE_RIP_AND_FINISH();
3843 IEM_MC_END();
3844 }
3845 }
3846}
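

/*
 * Note: vcvtpd2ps narrows, so even the VEX.256 source form above produces at
 * most a 128-bit result; hence both vector length paths store an XMM value
 * and explicitly clear the upper lanes of the destination YMM register.
 */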
3847
3848
3849/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
3850FNIEMOP_DEF(iemOp_vcvtss2sd_Vsd_Hx_Wss)
3851{
3852 IEMOP_MNEMONIC3(VEX_RVM, VCVTSS2SD, vcvtss2sd, Vsd, Hx, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3853 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3854 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtss2sd_u128_r32, iemAImpl_vcvtss2sd_u128_r32_fallback));
3855}
3856
3857
3858/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
3859FNIEMOP_DEF(iemOp_vcvtsd2ss_Vss_Hx_Wsd)
3860{
3861 IEMOP_MNEMONIC3(VEX_RVM, VCVTSD2SS, vcvtsd2ss, Vss, Hx, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3862 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3863 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsd2ss_u128_r64, iemAImpl_vcvtsd2ss_u128_r64_fallback));
3864}
3865
3866
3867/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
3868FNIEMOP_DEF(iemOp_vcvtdq2ps_Vps_Wdq)
3869{
3870 IEMOP_MNEMONIC2(VEX_RM, VCVTDQ2PS, vcvtdq2ps, Vps, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3871 IEMOPMEDIAF2_INIT_VARS( vcvtdq2ps);
3872 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3873}
3874
3875
3876/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
3877FNIEMOP_DEF(iemOp_vcvtps2dq_Vdq_Wps)
3878{
3879 IEMOP_MNEMONIC2(VEX_RM, VCVTPS2DQ, vcvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3880 IEMOPMEDIAF2_INIT_VARS( vcvtps2dq);
3881 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3882}
3883
3884
3885/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
3886FNIEMOP_DEF(iemOp_vcvttps2dq_Vdq_Wps)
3887{
3888 IEMOP_MNEMONIC2(VEX_RM, VCVTTPS2DQ, vcvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3889 IEMOPMEDIAF2_INIT_VARS( vcvttps2dq);
3890 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3891}
3892
3893
3894/* Opcode VEX.F2.0F 0x5b - invalid */
3895
3896
3897/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
3898FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
3899{
3900 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3901 IEMOPMEDIAF3_INIT_VARS( vsubps);
3902 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3903}
3904
3905
3906/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
3907FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
3908{
3909 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3910 IEMOPMEDIAF3_INIT_VARS( vsubpd);
3911 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3912}
3913
3914
3915/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
3916FNIEMOP_DEF(iemOp_vsubss_Vss_Hss_Wss)
3917{
3918 IEMOP_MNEMONIC3(VEX_RVM, VSUBSS, vsubss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3919 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3920 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback));
3921}
3922
3923
3924/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
3925FNIEMOP_DEF(iemOp_vsubsd_Vsd_Hsd_Wsd)
3926{
3927 IEMOP_MNEMONIC3(VEX_RVM, VSUBSD, vsubsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3928 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3929 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback));
3930}
3931
3932
3933/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
3934FNIEMOP_DEF(iemOp_vminps_Vps_Hps_Wps)
3935{
3936 IEMOP_MNEMONIC3(VEX_RVM, VMINPS, vminps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3937 IEMOPMEDIAF3_INIT_VARS( vminps);
3938 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3939}
3940
3941
3942/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
3943FNIEMOP_DEF(iemOp_vminpd_Vpd_Hpd_Wpd)
3944{
3945 IEMOP_MNEMONIC3(VEX_RVM, VMINPD, vminpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3946 IEMOPMEDIAF3_INIT_VARS( vminpd);
3947 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3948}
3949
3950
3951/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
3952FNIEMOP_DEF(iemOp_vminss_Vss_Hss_Wss)
3953{
3954 IEMOP_MNEMONIC3(VEX_RVM, VMINSS, vminss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3955 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3956 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback));
3957}
3958
3959
3960/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
3961FNIEMOP_DEF(iemOp_vminsd_Vsd_Hsd_Wsd)
3962{
3963 IEMOP_MNEMONIC3(VEX_RVM, VMINSD, vminsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3964 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3965 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback));
3966}
3967
3968
3969/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
3970FNIEMOP_DEF(iemOp_vdivps_Vps_Hps_Wps)
3971{
3972 IEMOP_MNEMONIC3(VEX_RVM, VDIVPS, vdivps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3973 IEMOPMEDIAF3_INIT_VARS( vdivps);
3974 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3975}
3976
3977
3978/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
3979FNIEMOP_DEF(iemOp_vdivpd_Vpd_Hpd_Wpd)
3980{
3981 IEMOP_MNEMONIC3(VEX_RVM, VDIVPD, vdivpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3982 IEMOPMEDIAF3_INIT_VARS( vdivpd);
3983 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3984}
3985
3986
3987/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
3988FNIEMOP_DEF(iemOp_vdivss_Vss_Hss_Wss)
3989{
3990 IEMOP_MNEMONIC3(VEX_RVM, VDIVSS, vdivss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3991 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3992 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback));
3993}
3994
3995
3996/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
3997FNIEMOP_DEF(iemOp_vdivsd_Vsd_Hsd_Wsd)
3998{
3999 IEMOP_MNEMONIC3(VEX_RVM, VDIVSD, vdivsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4000 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
4001 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback));
4002}
4003
4004
4005/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
4006FNIEMOP_DEF(iemOp_vmaxps_Vps_Hps_Wps)
4007{
4008 IEMOP_MNEMONIC3(VEX_RVM, VMAXPS, vmaxps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4009 IEMOPMEDIAF3_INIT_VARS( vmaxps);
4010 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4011}
4012
4013
4014/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
4015FNIEMOP_DEF(iemOp_vmaxpd_Vpd_Hpd_Wpd)
4016{
4017 IEMOP_MNEMONIC3(VEX_RVM, VMAXPD, vmaxpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4018 IEMOPMEDIAF3_INIT_VARS( vmaxpd);
4019 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4020}
4021
4022
4023/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
4024FNIEMOP_DEF(iemOp_vmaxss_Vss_Hss_Wss)
4025{
4026 IEMOP_MNEMONIC3(VEX_RVM, VMAXSS, vmaxss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4027 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
4028 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback));
4029}
4030
4031
4032/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
4033FNIEMOP_DEF(iemOp_vmaxsd_Vsd_Hsd_Wsd)
4034{
4035 IEMOP_MNEMONIC3(VEX_RVM, VMAXSD, vmaxsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4036 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
4037 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback));
4038}
4039
4040
4041/* Opcode VEX.0F 0x60 - invalid */
4042
4043
4044/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
4045FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
4046{
4047 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4048 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
4049 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4050}
4051
4052
4053/* Opcode VEX.F3.0F 0x60 - invalid */
4054
4055
4056/* Opcode VEX.0F 0x61 - invalid */
4057
4058
4059/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
4060FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
4061{
4062 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4063 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
4064 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4065}
4066
4067
4068/* Opcode VEX.F3.0F 0x61 - invalid */
4069
4070
4071/* Opcode VEX.0F 0x62 - invalid */
4072
4073/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
4074FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
4075{
4076 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4077 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
4078 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4079}
4080
4081
4082/* Opcode VEX.F3.0F 0x62 - invalid */
4083
4085
4086/* Opcode VEX.0F 0x63 - invalid */
4087
4088
4089/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
4090FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
4091{
4092 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4093 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
4094 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4095}
4096
4097
4098/* Opcode VEX.F3.0F 0x63 - invalid */
4099
4100/* Opcode VEX.0F 0x64 - invalid */
4101
4102
4103/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
4104FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
4105{
4106 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4107 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
4108 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4109}
4110
4111
4112/* Opcode VEX.F3.0F 0x64 - invalid */
4113
4114/* Opcode VEX.0F 0x65 - invalid */
4115
4116
4117/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
4118FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
4119{
4120 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4121 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
4122 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4123}
4124
4125
4126/* Opcode VEX.F3.0F 0x65 - invalid */
4127
4128/* Opcode VEX.0F 0x66 - invalid */
4129
4130
4131/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
4132FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
4133{
4134 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4135 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
4136 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4137}
4138
4139
4140/* Opcode VEX.F3.0F 0x66 - invalid */
4141
4142/* Opcode VEX.0F 0x67 - invalid */
4143
4144
4145/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
4146FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
4147{
4148 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4149 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
4150 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4151}
4152
4153
4154/* Opcode VEX.F3.0F 0x67 - invalid */
4155
4156
4157///**
4158// * Common worker for SSE2 instructions on the form:
4159// * pxxxx xmm1, xmm2/mem128
4160// *
4161// * The 2nd operand is the second half of a register, which in the memory case
4162// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
4163// * where it may read the full 128 bits or only the upper 64 bits.
4164// *
4165// * Exceptions type 4.
4166// */
4167//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
4168//{
4169// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4170// if (IEM_IS_MODRM_REG_MODE(bRm))
4171// {
4172// /*
4173// * Register, register.
4174// */
4175// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4176// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4177// IEM_MC_ARG(PRTUINT128U, pDst, 0);
4178// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4179// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4180// IEM_MC_PREPARE_SSE_USAGE();
4181// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4182// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4183// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4184// IEM_MC_ADVANCE_RIP_AND_FINISH();
4185// IEM_MC_END();
4186// }
4187// else
4188// {
4189// /*
4190// * Register, memory.
4191// */
4192// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4193// IEM_MC_ARG(PRTUINT128U, pDst, 0);
4194// IEM_MC_LOCAL(RTUINT128U, uSrc);
4195// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4196// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4197//
4198// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4199// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4200// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4201// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
4202//
4203// IEM_MC_PREPARE_SSE_USAGE();
4204// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4205// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4206//
4207// IEM_MC_ADVANCE_RIP_AND_FINISH();
4208// IEM_MC_END();
4209// }
4210// return VINF_SUCCESS;
4211//}
4212
4213
4214/* Opcode VEX.0F 0x68 - invalid */
4215
4216/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
4217FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
4218{
4219 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4220 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
4221 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4222}
4223
4224
4225/* Opcode VEX.F3.0F 0x68 - invalid */
4226
4227
4228/* Opcode VEX.0F 0x69 - invalid */
4229
4230
4231/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
4232FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
4233{
4234 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4235 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
4236 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4237}
4238
4239
4240/* Opcode VEX.F3.0F 0x69 - invalid */
4241
4242
4243/* Opcode VEX.0F 0x6a - invalid */
4244
4245
4246/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
4247FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
4248{
4249 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4250 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
4251 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4252}
4253
4254
4255/* Opcode VEX.F3.0F 0x6a - invalid */
4256
4257
4258/* Opcode VEX.0F 0x6b - invalid */
4259
4260
4261/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
4262FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
4263{
4264 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4265 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
4266 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4267}
4268
4269
4270/* Opcode VEX.F3.0F 0x6b - invalid */
4271
4272
4273/* Opcode VEX.0F 0x6c - invalid */
4274
4275
4276/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
4277FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
4278{
4279 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4280 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
4281 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4282}
4283
4284
4285/* Opcode VEX.F3.0F 0x6c - invalid */
4286/* Opcode VEX.F2.0F 0x6c - invalid */
4287
4288
4289/* Opcode VEX.0F 0x6d - invalid */
4290
4291
4292/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
4293FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
4294{
4295 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4296 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
4297 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4298}
4299
4300
4301/* Opcode VEX.F3.0F 0x6d - invalid */
4302
4303
4304/* Opcode VEX.0F 0x6e - invalid */
4305
4306FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
4307{
4308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4310 {
4311 /**
4312 * @opcode 0x6e
4313 * @opcodesub rex.w=1
4314 * @oppfx 0x66
4315 * @opcpuid avx
4316 * @opgroup og_avx_simdint_datamov
4317 * @opxcpttype 5
4318 * @optest 64-bit / op1=1 op2=2 -> op1=2
4319 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4320 */
4321 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4322 if (IEM_IS_MODRM_REG_MODE(bRm))
4323 {
4324 /* XMM, greg64 */
4325 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4326 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4327 IEM_MC_LOCAL(uint64_t, u64Tmp);
4328
4329 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4330 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4331
4332 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4333 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4334
4335 IEM_MC_ADVANCE_RIP_AND_FINISH();
4336 IEM_MC_END();
4337 }
4338 else
4339 {
4340 /* XMM, [mem64] */
4341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4343 IEM_MC_LOCAL(uint64_t, u64Tmp);
4344
4345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4346 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4349
4350 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4351 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4352
4353 IEM_MC_ADVANCE_RIP_AND_FINISH();
4354 IEM_MC_END();
4355 }
4356 }
4357 else
4358 {
4359 /**
4360 * @opdone
4361 * @opcode 0x6e
4362 * @opcodesub rex.w=0
4363 * @oppfx 0x66
4364 * @opcpuid avx
4365 * @opgroup og_avx_simdint_datamov
4366 * @opxcpttype 5
4367 * @opfunction iemOp_vmovd_q_Vy_Ey
4368 * @optest op1=1 op2=2 -> op1=2
4369 * @optest op1=0 op2=-42 -> op1=-42
4370 */
4371 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4372 if (IEM_IS_MODRM_REG_MODE(bRm))
4373 {
4374 /* XMM, greg32 */
4375 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4376 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4377 IEM_MC_LOCAL(uint32_t, u32Tmp);
4378
4379 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4380 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4381
4382 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4383 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4384
4385 IEM_MC_ADVANCE_RIP_AND_FINISH();
4386 IEM_MC_END();
4387 }
4388 else
4389 {
4390 /* XMM, [mem32] */
4391 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4393 IEM_MC_LOCAL(uint32_t, u32Tmp);
4394
4395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4396 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4397 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4398 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4399
4400 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4401 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4402
4403 IEM_MC_ADVANCE_RIP_AND_FINISH();
4404 IEM_MC_END();
4405 }
4406 }
4407}
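

/*
 * Note: VEX.W selects the operand width above (vmovq for W=1, vmovd for W=0);
 * either way the value lands in the low lane and the destination register is
 * zero-extended all the way to VLMAX, as the *_ZX_VLMAX stores show.
 */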
4408
4409
4410/* Opcode VEX.F3.0F 0x6e - invalid */
4411
4412
4413/* Opcode VEX.0F 0x6f - invalid */
4414
4415/**
4416 * @opcode 0x6f
4417 * @oppfx 0x66
4418 * @opcpuid avx
4419 * @opgroup og_avx_simdint_datamove
4420 * @opxcpttype 1
4421 * @optest op1=1 op2=2 -> op1=2
4422 * @optest op1=0 op2=-42 -> op1=-42
4423 */
4424FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
4425{
4426 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4427 Assert(pVCpu->iem.s.uVexLength <= 1);
4428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4429 if (IEM_IS_MODRM_REG_MODE(bRm))
4430 {
4431 /*
4432 * Register, register.
4433 */
4434 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4435 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4436
4437 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4438 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4439 if (pVCpu->iem.s.uVexLength == 0)
4440 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4441 IEM_GET_MODRM_RM(pVCpu, bRm));
4442 else
4443 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4444 IEM_GET_MODRM_RM(pVCpu, bRm));
4445 IEM_MC_ADVANCE_RIP_AND_FINISH();
4446 IEM_MC_END();
4447 }
4448 else if (pVCpu->iem.s.uVexLength == 0)
4449 {
4450 /*
4451 * Register, memory128.
4452 */
4453 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4454 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4456
4457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4458 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4459 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4460 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4461
4462 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4463 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4464
4465 IEM_MC_ADVANCE_RIP_AND_FINISH();
4466 IEM_MC_END();
4467 }
4468 else
4469 {
4470 /*
4471 * Register, memory256.
4472 */
4473 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4474 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4476
4477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4478 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4479 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4480 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4481
4482 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4483 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4484
4485 IEM_MC_ADVANCE_RIP_AND_FINISH();
4486 IEM_MC_END();
4487 }
4488}
4489
4490/**
4491 * @opcode 0x6f
4492 * @oppfx 0xf3
4493 * @opcpuid avx
4494 * @opgroup og_avx_simdint_datamove
4495 * @opxcpttype 4UA
4496 * @optest op1=1 op2=2 -> op1=2
4497 * @optest op1=0 op2=-42 -> op1=-42
4498 */
4499FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
4500{
4501 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4502 Assert(pVCpu->iem.s.uVexLength <= 1);
4503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4504 if (IEM_IS_MODRM_REG_MODE(bRm))
4505 {
4506 /*
4507 * Register, register.
4508 */
4509 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4510 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4511
4512 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4513 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4514 if (pVCpu->iem.s.uVexLength == 0)
4515 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4516 IEM_GET_MODRM_RM(pVCpu, bRm));
4517 else
4518 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4519 IEM_GET_MODRM_RM(pVCpu, bRm));
4520 IEM_MC_ADVANCE_RIP_AND_FINISH();
4521 IEM_MC_END();
4522 }
4523 else if (pVCpu->iem.s.uVexLength == 0)
4524 {
4525 /*
4526 * Register, memory128.
4527 */
4528 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4529 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4530 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4531
4532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4533 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4534 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4535 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4536
4537 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4538 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4539
4540 IEM_MC_ADVANCE_RIP_AND_FINISH();
4541 IEM_MC_END();
4542 }
4543 else
4544 {
4545 /*
4546 * Register, memory256.
4547 */
4548 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4549 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4551
4552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4553 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4554 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4555 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4556
4557 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4558 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4559
4560 IEM_MC_ADVANCE_RIP_AND_FINISH();
4561 IEM_MC_END();
4562 }
4563}
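

/*
 * Note: The only functional difference from the vmovdqa form above is the
 * memory fetch: vmovdqa uses the *_ALIGN_SSE / *_ALIGN_AVX fetchers (which
 * fault on a misaligned operand) while vmovdqu uses the *_NO_AC variants.
 */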
4564
4565
4566/* Opcode VEX.0F 0x70 - invalid */
4567
4568
4569/**
4570 * Common worker for AVX/AVX2 instructions on the forms:
4571 * - vpxxx xmm0, xmm2/mem128, imm8
4572 * - vpxxx ymm0, ymm2/mem256, imm8
4573 *
4574 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4575 */
4576FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4577{
4578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4579 if (IEM_IS_MODRM_REG_MODE(bRm))
4580 {
4581 /*
4582 * Register, register.
4583 */
4584 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4585 if (pVCpu->iem.s.uVexLength)
4586 {
4587 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4588 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4589 IEM_MC_LOCAL(RTUINT256U, uDst);
4590 IEM_MC_LOCAL(RTUINT256U, uSrc);
4591 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4592 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4593 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4594 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4595 IEM_MC_PREPARE_AVX_USAGE();
4596 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4597 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4598 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4599 IEM_MC_ADVANCE_RIP_AND_FINISH();
4600 IEM_MC_END();
4601 }
4602 else
4603 {
4604 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4605 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4606 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4607 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4608 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4609 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4610 IEM_MC_PREPARE_AVX_USAGE();
4611 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4612 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4613 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4614 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4615 IEM_MC_ADVANCE_RIP_AND_FINISH();
4616 IEM_MC_END();
4617 }
4618 }
4619 else
4620 {
4621 /*
4622 * Register, memory.
4623 */
4624 if (pVCpu->iem.s.uVexLength)
4625 {
4626 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4627 IEM_MC_LOCAL(RTUINT256U, uDst);
4628 IEM_MC_LOCAL(RTUINT256U, uSrc);
4629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4630 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4631 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4632
4633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4634 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4635 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4636 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4637 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4638 IEM_MC_PREPARE_AVX_USAGE();
4639
4640 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4641 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4642 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4643
4644 IEM_MC_ADVANCE_RIP_AND_FINISH();
4645 IEM_MC_END();
4646 }
4647 else
4648 {
4649 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4650 IEM_MC_LOCAL(RTUINT128U, uSrc);
4651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4652 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4653 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4654
4655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4656 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4657 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4658 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4659 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4660 IEM_MC_PREPARE_AVX_USAGE();
4661
4662 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4663 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4664 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4665 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4666
4667 IEM_MC_ADVANCE_RIP_AND_FINISH();
4668 IEM_MC_END();
4669 }
4670 }
4671}
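

/*
 * Informal reference for the imm8 lane selection performed by the vpshufd
 * style helpers dispatched above (illustrative sketch only with a
 * hypothetical name; not part of the build):
 */
//static void iemIllustratePShufDU128(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
//{
//    RTUINT128U const uSrc = *puSrc; /* Copy first: destination may alias source. */
//    for (unsigned iDWord = 0; iDWord < 4; iDWord++) /* Two imm8 bits select each dword. */
//        puDst->au32[iDWord] = uSrc.au32[(bImm >> (iDWord * 2)) & 3];
//}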
4672
4673
4674/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
4675FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
4676{
4677 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4678 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
4679 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
4681}
4682
4683
4684/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
4685FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
4686{
4687 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4688 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
4689 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
4691}
4692
4693
4694/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
4695FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
4696{
4697 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4698 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
4699 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
4700}
4701
4702
4703/**
4704 * Common worker(s) for AVX/AVX2 instructions on the forms:
4705 * - vpxxx xmm0, xmm2, imm8
4706 * - vpxxx ymm0, ymm2, imm8
4707 *
4708 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4709 */
4710FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
4711{
4712 if (IEM_IS_MODRM_REG_MODE(bRm))
4713 {
4714 /*
4715 * Register, register.
4716 */
4717 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4718 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4719 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
4720 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4721 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4722 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4723 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4724 IEM_MC_PREPARE_AVX_USAGE();
4725 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4726 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4727 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4728 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
4729 IEM_MC_ADVANCE_RIP_AND_FINISH();
4730 IEM_MC_END();
4731 }
4732 /* No memory operand. */
4733 else
4734 IEMOP_RAISE_INVALID_OPCODE_RET();
4735}
4736
4737FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4738{
4739 if (IEM_IS_MODRM_REG_MODE(bRm))
4740 {
4741 /*
4742 * Register, register.
4743 */
4744 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4745 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4746 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
4747 IEM_MC_LOCAL(RTUINT256U, uDst);
4748 IEM_MC_LOCAL(RTUINT256U, uSrc);
4749 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4750 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4751 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4752 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4753 IEM_MC_PREPARE_AVX_USAGE();
4754 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4755 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4756 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
4757 IEM_MC_ADVANCE_RIP_AND_FINISH();
4758 IEM_MC_END();
4759 }
4760 /* No memory operand. */
4761 else
4762 IEMOP_RAISE_INVALID_OPCODE_RET();
4763}
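

/*
 * Note: The two workers above back the VEX_VMI_REG shift-by-immediate groups
 * 12, 13 and 14 below.  Those encodings only exist with a register operand,
 * which is why the memory case raises #UD instead of decoding an effective
 * address.
 */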
4764
4765
4766/* Opcode VEX.0F 0x71 11/2 - invalid. */
4767/** Opcode VEX.66.0F 0x71 11/2. */
4768FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
4769{
4770 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4771 if (pVCpu->iem.s.uVexLength)
4772 {
4773 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4774 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
4775 }
4776 else
4777 {
4778 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4779 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
4780 }
4781}
4782
4783
4784/* Opcode VEX.0F 0x71 11/4 - invalid */
4785/** Opcode VEX.66.0F 0x71 11/4. */
4786FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
4787{
4788 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4789 if (pVCpu->iem.s.uVexLength)
4790 {
4791 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4792 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
4793 }
4794 else
4795 {
4796 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4797 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
4798 }
4799}
4800
4801/* Opcode VEX.0F 0x71 11/6 - invalid */
4802
4803/** Opcode VEX.66.0F 0x71 11/6. */
4804FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
4805{
4806 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4807 if (pVCpu->iem.s.uVexLength)
4808 {
4809 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4810 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
4811 }
4812 else
4813 {
4814 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4815 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
4816 }
4817}
4818
4819
4820/**
4821 * VEX Group 12 jump table for register variant.
4822 */
4823IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
4824{
4825 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4826 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4827 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4828 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4829 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4830 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4831 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4832 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4833};
4834AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
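

/*
 * Note: Each /r row in the table above holds four entries indexed by
 * pVCpu->iem.s.idxPrefix (none, 0x66, 0xF3, 0xF2), so the lookup below is
 * reg * 4 + prefix; only the 0x66 column carries valid group 12 encodings.
 */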
4835
4836
4837/** Opcode VEX.0F 0x71. */
4838FNIEMOP_DEF(iemOp_VGrp12)
4839{
4840 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4841 if (IEM_IS_MODRM_REG_MODE(bRm))
4842 /* register, register */
4843 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4844 + pVCpu->iem.s.idxPrefix], bRm);
4845 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4846}
4847
4848
4849/* Opcode VEX.0F 0x72 11/2 - invalid. */
4850/** Opcode VEX.66.0F 0x72 11/2. */
4851FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
4852{
4853 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4854 if (pVCpu->iem.s.uVexLength)
4855 {
4856 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4857 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
4858 }
4859 else
4860 {
4861 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4862 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
4863 }
4864}
4865
4866
4867/* Opcode VEX.0F 0x72 11/4 - invalid. */
4868/** Opcode VEX.66.0F 0x72 11/4. */
4869FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
4870{
4871 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4872 if (pVCpu->iem.s.uVexLength)
4873 {
4874 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4875 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
4876 }
4877 else
4878 {
4879 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4880 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
4881 }
4882}
4883
4884/* Opcode VEX.0F 0x72 11/6 - invalid. */
4885
4886/** Opcode VEX.66.0F 0x72 11/6. */
4887FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
4888{
4889 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4890 if (pVCpu->iem.s.uVexLength)
4891 {
4892 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4893 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
4894 }
4895 else
4896 {
4897 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4898 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
4899 }
4900}
4901
4902
4903/**
4904 * VEX Group 13 jump table for register variant.
4905 */
4906IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
4907{
4908 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4909 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4910 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4911 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4912 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4913 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4914 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4915 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4916};
4917AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
4918
4919/** Opcode VEX.0F 0x72. */
4920FNIEMOP_DEF(iemOp_VGrp13)
4921{
4922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4923 if (IEM_IS_MODRM_REG_MODE(bRm))
4924 /* register, register */
4925 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4926 + pVCpu->iem.s.idxPrefix], bRm);
4927 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4928}
4929
4930
4931/* Opcode VEX.0F 0x73 11/2 - invalid. */
4932/** Opcode VEX.66.0F 0x73 11/2. */
4933FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
4934{
4935 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4936 if (pVCpu->iem.s.uVexLength)
4937 {
4938 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4939 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
4940 }
4941 else
4942 {
4943 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4944 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
4945 }
4946}
4947
4948
4949/** Opcode VEX.66.0F 0x73 11/3. */
4950FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
4951{
4952 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4953 if (pVCpu->iem.s.uVexLength)
4954 {
4955 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4956 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
4957 }
4958 else
4959 {
4960 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4961 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
4962 }
4963}
4964
4965/* Opcode VEX.0F 0x73 11/6 - invalid. */
4966
4967/** Opcode VEX.66.0F 0x73 11/6. */
4968FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
4969{
4970 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4971 if (pVCpu->iem.s.uVexLength)
4972 {
4973 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4974 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
4975 }
4976 else
4977 {
4978 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4979 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
4980 }
4981}
4982
4983/** Opcode VEX.66.0F 0x73 11/7. */
4984FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
4985{
4986 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4987 if (pVCpu->iem.s.uVexLength)
4988 {
4989 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4990 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
4991 }
4992 else
4993 {
4994 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4995 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
4996 }
4997}
4998
4999/* Opcode VEX.0F 0x73 11/7 - invalid. */
5000
5001/**
5002 * VEX Group 14 jump table for register variant.
5003 */
5004IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
5005{
5006 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5007 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5008 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5009 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5010 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5011 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
5012 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5013 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
5014};
5015AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
5016
5017
5018/** Opcode VEX.0F 0x73. */
5019FNIEMOP_DEF(iemOp_VGrp14)
5020{
5021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5022 if (IEM_IS_MODRM_REG_MODE(bRm))
5023 /* register, register */
5024 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5025 + pVCpu->iem.s.idxPrefix], bRm);
5026 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
5027}
5028
5029
5030/* Opcode VEX.0F 0x74 - invalid */
5031
5032
5033/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
5034FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
5035{
5036 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5037 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
5038 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5039}
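
/* Note! IEMOPMEDIAOPTF3_INIT_VARS(vpcmpeqb) presumably declares the local s_Host and
         s_Fallback function tables referenced above, pointing at the
         iemAImpl_vpcmpeqb_u128/_u256 helpers and their *_fallback variants. */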
5040
5041/* Opcode VEX.F3.0F 0x74 - invalid */
5042/* Opcode VEX.F2.0F 0x74 - invalid */
5043
5044
5045/* Opcode VEX.0F 0x75 - invalid */
5046
5047
5048/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
5049FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
5050{
5051 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5052 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
5053 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5054}
5055
5056
5057/* Opcode VEX.F3.0F 0x75 - invalid */
5058/* Opcode VEX.F2.0F 0x75 - invalid */
5059
5060
5061/* Opcode VEX.0F 0x76 - invalid */
5062
5063
5064/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
5065FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
5066{
5067 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5068 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
5069 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5070}
5071
5072
5073/* Opcode VEX.F3.0F 0x76 - invalid */
5074/* Opcode VEX.F2.0F 0x76 - invalid */
5075
5076
5077/** Opcode VEX.0F 0x77 - vzeroupper / vzeroall */
5078FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
5079{
5080 Assert(pVCpu->iem.s.uVexLength <= 1);
5081 if (pVCpu->iem.s.uVexLength == 0)
5082 {
5083 /*
5084 * 128-bit: vzeroupper
5085 */
5086 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
5087 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5088
5089 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5090 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5091 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5092
5093 IEM_MC_CLEAR_YREG_128_UP(0);
5094 IEM_MC_CLEAR_YREG_128_UP(1);
5095 IEM_MC_CLEAR_YREG_128_UP(2);
5096 IEM_MC_CLEAR_YREG_128_UP(3);
5097 IEM_MC_CLEAR_YREG_128_UP(4);
5098 IEM_MC_CLEAR_YREG_128_UP(5);
5099 IEM_MC_CLEAR_YREG_128_UP(6);
5100 IEM_MC_CLEAR_YREG_128_UP(7);
5101
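        /* YMM8 thru YMM15 only exist in 64-bit mode; outside it just the first
           eight registers are architecturally accessible, so only those get cleared. */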
5102 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
5103 {
5104 IEM_MC_CLEAR_YREG_128_UP( 8);
5105 IEM_MC_CLEAR_YREG_128_UP( 9);
5106 IEM_MC_CLEAR_YREG_128_UP(10);
5107 IEM_MC_CLEAR_YREG_128_UP(11);
5108 IEM_MC_CLEAR_YREG_128_UP(12);
5109 IEM_MC_CLEAR_YREG_128_UP(13);
5110 IEM_MC_CLEAR_YREG_128_UP(14);
5111 IEM_MC_CLEAR_YREG_128_UP(15);
5112 }
5113
5114 IEM_MC_ADVANCE_RIP_AND_FINISH();
5115 IEM_MC_END();
5116 }
5117 else
5118 {
5119 /*
5120 * 256-bit: vzeroall
5121 */
5122 IEMOP_MNEMONIC(vzeroall, "vzeroall");
5123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5124
5125 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5126 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5127 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5128
5129 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
5130 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
5131 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
5132 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
5133 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
5134 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
5135 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
5136 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
5137 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
5138
5139 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
5140 {
5141 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
5142 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
5143 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
5144 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
5145 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
5146 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
5147 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
5148 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
5149 }
5150
5151 IEM_MC_ADVANCE_RIP_AND_FINISH();
5152 IEM_MC_END();
5153 }
5154}
5155
5156
5157/* Opcode VEX.66.0F 0x77 - invalid */
5158/* Opcode VEX.F3.0F 0x77 - invalid */
5159/* Opcode VEX.F2.0F 0x77 - invalid */
5160
5161/* Opcode VEX.0F 0x78 - invalid */
5162/* Opcode VEX.66.0F 0x78 - invalid */
5163/* Opcode VEX.F3.0F 0x78 - invalid */
5164/* Opcode VEX.F2.0F 0x78 - invalid */
5165
5166/* Opcode VEX.0F 0x79 - invalid */
5167/* Opcode VEX.66.0F 0x79 - invalid */
5168/* Opcode VEX.F3.0F 0x79 - invalid */
5169/* Opcode VEX.F2.0F 0x79 - invalid */
5170
5171/* Opcode VEX.0F 0x7a - invalid */
5172/* Opcode VEX.66.0F 0x7a - invalid */
5173/* Opcode VEX.F3.0F 0x7a - invalid */
5174/* Opcode VEX.F2.0F 0x7a - invalid */
5175
5176/* Opcode VEX.0F 0x7b - invalid */
5177/* Opcode VEX.66.0F 0x7b - invalid */
5178/* Opcode VEX.F3.0F 0x7b - invalid */
5179/* Opcode VEX.F2.0F 0x7b - invalid */
5180
5181/* Opcode VEX.0F 0x7c - invalid */
5182
5183
5184/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
5185FNIEMOP_DEF(iemOp_vhaddpd_Vpd_Hpd_Wpd)
5186{
5187 IEMOP_MNEMONIC3(VEX_RVM, VHADDPD, vhaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5188 IEMOPMEDIAF3_INIT_VARS( vhaddpd);
5189 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5190}
5191
5192
5193/* Opcode VEX.F3.0F 0x7c - invalid */
5194
5195
5196/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
5197FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
5198{
5199 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5200 IEMOPMEDIAF3_INIT_VARS( vhaddps);
5201 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5202}
5203
5204
5205/* Opcode VEX.0F 0x7d - invalid */
5206
5207
5208/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
5209FNIEMOP_DEF(iemOp_vhsubpd_Vpd_Hpd_Wpd)
5210{
5211 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPD, vhsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5212 IEMOPMEDIAF3_INIT_VARS( vhsubpd);
5213 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5214}
5215
5216
5217/* Opcode VEX.F3.0F 0x7d - invalid */
5218
5219
5220/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
5221FNIEMOP_DEF(iemOp_vhsubps_Vps_Hps_Wps)
5222{
5223 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPS, vhsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5224 IEMOPMEDIAF3_INIT_VARS( vhsubps);
5225 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5226}
5227
5228
5229/* Opcode VEX.0F 0x7e - invalid */
5230
5231FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
5232{
5233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5234 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5235 {
5236 /**
5237 * @opcode 0x7e
5238 * @opcodesub rex.w=1
5239 * @oppfx 0x66
5240 * @opcpuid avx
5241 * @opgroup og_avx_simdint_datamov
5242 * @opxcpttype 5
5243 * @optest 64-bit / op1=1 op2=2 -> op1=2
5244 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5245 */
5246 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
5247 if (IEM_IS_MODRM_REG_MODE(bRm))
5248 {
5249 /* greg64, XMM */
5250 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5251 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5252 IEM_MC_LOCAL(uint64_t, u64Tmp);
5253
5254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5255 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5256
5257 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5258 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5259
5260 IEM_MC_ADVANCE_RIP_AND_FINISH();
5261 IEM_MC_END();
5262 }
5263 else
5264 {
5265 /* [mem64], XMM */
5266 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5268 IEM_MC_LOCAL(uint64_t, u64Tmp);
5269
5270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5271 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5272 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5273 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5274
5275 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5276 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5277
5278 IEM_MC_ADVANCE_RIP_AND_FINISH();
5279 IEM_MC_END();
5280 }
5281 }
5282 else
5283 {
5284 /**
5285 * @opdone
5286 * @opcode 0x7e
5287 * @opcodesub rex.w=0
5288 * @oppfx 0x66
5289 * @opcpuid avx
5290 * @opgroup og_avx_simdint_datamov
5291 * @opxcpttype 5
5292 * @opfunction iemOp_vmovd_q_Ey_Vy
5293 * @optest op1=1 op2=2 -> op1=2
5294 * @optest op1=0 op2=-42 -> op1=-42
5295 */
5296 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
5297 if (IEM_IS_MODRM_REG_MODE(bRm))
5298 {
5299 /* greg32, XMM */
5300 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5301 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5302 IEM_MC_LOCAL(uint32_t, u32Tmp);
5303
5304 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5305 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5306
5307 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5308 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5309
5310 IEM_MC_ADVANCE_RIP_AND_FINISH();
5311 IEM_MC_END();
5312 }
5313 else
5314 {
5315 /* [mem32], XMM */
5316 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5318 IEM_MC_LOCAL(uint32_t, u32Tmp);
5319
5320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5321 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5323 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5324
5325 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5326 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5327
5328 IEM_MC_ADVANCE_RIP_AND_FINISH();
5329 IEM_MC_END();
5330 }
5331 }
5332}
5333
5334
5335/**
5336 * @opcode 0x7e
5337 * @oppfx 0xf3
5338 * @opcpuid avx
5339 * @opgroup og_avx_pcksclr_datamove
5340 * @opxcpttype none
5341 * @optest op1=1 op2=2 -> op1=2
5342 * @optest op1=0 op2=-42 -> op1=-42
5343 */
5344FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
5345{
5346 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5348 if (IEM_IS_MODRM_REG_MODE(bRm))
5349 {
5350 /*
5351 * Register, register.
5352 */
5353 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5354 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5355
5356 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5357 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5358
5359 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
5360 IEM_GET_MODRM_RM(pVCpu, bRm));
5361 IEM_MC_ADVANCE_RIP_AND_FINISH();
5362 IEM_MC_END();
5363 }
5364 else
5365 {
5366 /*
5367 * Memory, register.
5368 */
5369 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5370 IEM_MC_LOCAL(uint64_t, uSrc);
5371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5372
5373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5374 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5375 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5376 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5377
5378 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5379 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5380
5381 IEM_MC_ADVANCE_RIP_AND_FINISH();
5382 IEM_MC_END();
5383 }
5384}
5385
5386/* Opcode VEX.F2.0F 0x7e - invalid */
5387
5388
5389/* Opcode VEX.0F 0x7f - invalid */
5390
5391/**
5392 * @opcode 0x7f
5393 * @oppfx 0x66
5394 * @opcpuid avx
5395 * @opgroup og_avx_simdint_datamove
5396 * @opxcpttype 1
5397 * @optest op1=1 op2=2 -> op1=2
5398 * @optest op1=0 op2=-42 -> op1=-42
5399 */
5400FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
5401{
5402 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5403 Assert(pVCpu->iem.s.uVexLength <= 1);
5404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5405 if (IEM_IS_MODRM_REG_MODE(bRm))
5406 {
5407 /*
5408 * Register, register.
5409 */
5410 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5411 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5412
5413 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5414 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5415 if (pVCpu->iem.s.uVexLength == 0)
5416 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5417 IEM_GET_MODRM_REG(pVCpu, bRm));
5418 else
5419 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5420 IEM_GET_MODRM_REG(pVCpu, bRm));
5421 IEM_MC_ADVANCE_RIP_AND_FINISH();
5422 IEM_MC_END();
5423 }
5424 else if (pVCpu->iem.s.uVexLength == 0)
5425 {
5426 /*
5427 * Register, memory128.
5428 */
5429 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5430 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5432
5433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5434 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5435 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5436 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5437
5438 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5439 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5440
5441 IEM_MC_ADVANCE_RIP_AND_FINISH();
5442 IEM_MC_END();
5443 }
5444 else
5445 {
5446 /*
5447 * Register, memory256.
5448 */
5449 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5450 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5452
5453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5454 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5455 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5456 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5457
5458 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5459 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5460
5461 IEM_MC_ADVANCE_RIP_AND_FINISH();
5462 IEM_MC_END();
5463 }
5464}
5465
5466
5467/**
5468 * @opcode 0x7f
5469 * @oppfx 0xf3
5470 * @opcpuid avx
5471 * @opgroup og_avx_simdint_datamove
5472 * @opxcpttype 4UA
5473 * @optest op1=1 op2=2 -> op1=2
5474 * @optest op1=0 op2=-42 -> op1=-42
5475 */
5476FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
5477{
5478 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5479 Assert(pVCpu->iem.s.uVexLength <= 1);
5480 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5481 if (IEM_IS_MODRM_REG_MODE(bRm))
5482 {
5483 /*
5484 * Register, register.
5485 */
5486 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5487 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5488
5489 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5490 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5491 if (pVCpu->iem.s.uVexLength == 0)
5492 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5493 IEM_GET_MODRM_REG(pVCpu, bRm));
5494 else
5495 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5496 IEM_GET_MODRM_REG(pVCpu, bRm));
5497 IEM_MC_ADVANCE_RIP_AND_FINISH();
5498 IEM_MC_END();
5499 }
5500 else if (pVCpu->iem.s.uVexLength == 0)
5501 {
5502 /*
5503 * Register, memory128.
5504 */
5505 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5506 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5508
5509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5510 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5511 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5512 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5513
5514 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5515 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5516
5517 IEM_MC_ADVANCE_RIP_AND_FINISH();
5518 IEM_MC_END();
5519 }
5520 else
5521 {
5522 /*
5523 * Register, memory256.
5524 */
5525 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5526 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5528
5529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5530 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5532 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5533
5534 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5535 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5536
5537 IEM_MC_ADVANCE_RIP_AND_FINISH();
5538 IEM_MC_END();
5539 }
5540}
5541
5542/* Opcode VEX.F2.0F 0x7f - invalid */
5543
5544
5545/* Opcode VEX.0F 0x80 - invalid */
5546/* Opcode VEX.0F 0x81 - invalid */
5547/* Opcode VEX.0F 0x82 - invalid */
5548/* Opcode VEX.0F 0x83 - invalid */
5549/* Opcode VEX.0F 0x84 - invalid */
5550/* Opcode VEX.0F 0x85 - invalid */
5551/* Opcode VEX.0F 0x86 - invalid */
5552/* Opcode VEX.0F 0x87 - invalid */
5553/* Opcode VEX.0F 0x88 - invalid */
5554/* Opcode VEX.0F 0x89 - invalid */
5555/* Opcode VEX.0F 0x8a - invalid */
5556/* Opcode VEX.0F 0x8b - invalid */
5557/* Opcode VEX.0F 0x8c - invalid */
5558/* Opcode VEX.0F 0x8d - invalid */
5559/* Opcode VEX.0F 0x8e - invalid */
5560/* Opcode VEX.0F 0x8f - invalid */
5561/* Opcode VEX.0F 0x90 - invalid */
5562/* Opcode VEX.0F 0x91 - invalid */
5563/* Opcode VEX.0F 0x92 - invalid */
5564/* Opcode VEX.0F 0x93 - invalid */
5565/* Opcode VEX.0F 0x94 - invalid */
5566/* Opcode VEX.0F 0x95 - invalid */
5567/* Opcode VEX.0F 0x96 - invalid */
5568/* Opcode VEX.0F 0x97 - invalid */
5569/* Opcode VEX.0F 0x98 - invalid */
5570/* Opcode VEX.0F 0x99 - invalid */
5571/* Opcode VEX.0F 0x9a - invalid */
5572/* Opcode VEX.0F 0x9b - invalid */
5573/* Opcode VEX.0F 0x9c - invalid */
5574/* Opcode VEX.0F 0x9d - invalid */
5575/* Opcode VEX.0F 0x9e - invalid */
5576/* Opcode VEX.0F 0x9f - invalid */
5577/* Opcode VEX.0F 0xa0 - invalid */
5578/* Opcode VEX.0F 0xa1 - invalid */
5579/* Opcode VEX.0F 0xa2 - invalid */
5580/* Opcode VEX.0F 0xa3 - invalid */
5581/* Opcode VEX.0F 0xa4 - invalid */
5582/* Opcode VEX.0F 0xa5 - invalid */
5583/* Opcode VEX.0F 0xa6 - invalid */
5584/* Opcode VEX.0F 0xa7 - invalid */
5585/* Opcode VEX.0F 0xa8 - invalid */
5586/* Opcode VEX.0F 0xa9 - invalid */
5587/* Opcode VEX.0F 0xaa - invalid */
5588/* Opcode VEX.0F 0xab - invalid */
5589/* Opcode VEX.0F 0xac - invalid */
5590/* Opcode VEX.0F 0xad - invalid */
5591
5592
5593/* Opcode VEX.0F 0xae mem/0 - invalid. */
5594/* Opcode VEX.0F 0xae mem/1 - invalid. */
5595
5596/**
5597 * @ opmaps grp15
5598 * @ opcode !11/2
5599 * @ oppfx none
5600 * @ opcpuid sse
5601 * @ opgroup og_sse_mxcsrsm
5602 * @ opxcpttype 5
5603 * @ optest op1=0 -> mxcsr=0
5604 * @ optest op1=0x2083 -> mxcsr=0x2083
5605 * @ optest op1=0xfffffffe -> value.xcpt=0xd
5606 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5607 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5608 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5609 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5610 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5611 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5612 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5613 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5614 */
5615FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
5616{
5617 IEMOP_MNEMONIC1(VEX_M_MEM, VLDMXCSR, vldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5618 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5619 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5621 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5622 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5623 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5624 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_vldmxcsr, iEffSeg, GCPtrEff);
5625 IEM_MC_END();
5626}
5627
5628
5629/**
5630 * @opmaps vexgrp15
5631 * @opcode !11/3
5632 * @oppfx none
5633 * @opcpuid avx
5634 * @opgroup og_avx_mxcsrsm
5635 * @opxcpttype 5
5636 * @optest mxcsr=0 -> op1=0
5637 * @optest mxcsr=0x2083 -> op1=0x2083
5638 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5639 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
5640 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5641 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5642 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
5643 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
5644 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
5645 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
5646 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5647 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
5648 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5649 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
5650 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5651 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
5652 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5653 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
5654 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
5655 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
5656 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
5657 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
5658 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
5659 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
5660 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
5661 * -> value.xcpt=0x6
5662 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
5663 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
5664 * APMv4 rev 3.17 page 509.
5665 * @todo Test this instruction on AMD Ryzen.
5666 */
5667FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
5668{
5669 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5670 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5671 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5672 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5673 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5674 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5675 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5676 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
5677 IEM_MC_END();
5678}
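
/* Note the contrast with vldmxcsr above: vstmxcsr only reads MXCSR and thus passes 0
   as the guest shadow flush mask, whereas vldmxcsr modifies MXCSR and therefore
   flushes RT_BIT_64(kIemNativeGstReg_MxCsr). */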
5679
5680/* Opcode VEX.0F 0xae mem/4 - invalid. */
5681/* Opcode VEX.0F 0xae mem/5 - invalid. */
5682/* Opcode VEX.0F 0xae mem/6 - invalid. */
5683/* Opcode VEX.0F 0xae mem/7 - invalid. */
5684
5685/* Opcode VEX.0F 0xae 11b/0 - invalid. */
5686/* Opcode VEX.0F 0xae 11b/1 - invalid. */
5687/* Opcode VEX.0F 0xae 11b/2 - invalid. */
5688/* Opcode VEX.0F 0xae 11b/3 - invalid. */
5689/* Opcode VEX.0F 0xae 11b/4 - invalid. */
5690/* Opcode VEX.0F 0xae 11b/5 - invalid. */
5691/* Opcode VEX.0F 0xae 11b/6 - invalid. */
5692/* Opcode VEX.0F 0xae 11b/7 - invalid. */
5693
5694/**
5695 * Vex group 15 jump table for memory variant.
5696 */
5697IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
5698{ /* pfx: none, 066h, 0f3h, 0f2h */
5699 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5700 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5701 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5702 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5703 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5704 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5705 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5706 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5707};
5708AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
5709
5710
5711/** Opcode VEX.0F 0xae. */
5712FNIEMOP_DEF(iemOp_VGrp15)
5713{
5714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5715 if (IEM_IS_MODRM_REG_MODE(bRm))
5716 /* register, register */
5717 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
5718
5719 /* memory, register */
5720 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5721 + pVCpu->iem.s.idxPrefix], bRm);
5722}
5723
5724
5725/* Opcode VEX.0F 0xaf - invalid. */
5726
5727/* Opcode VEX.0F 0xb0 - invalid. */
5728/* Opcode VEX.0F 0xb1 - invalid. */
5729/* Opcode VEX.0F 0xb2 - invalid. */
5731/* Opcode VEX.0F 0xb3 - invalid. */
5732/* Opcode VEX.0F 0xb4 - invalid. */
5733/* Opcode VEX.0F 0xb5 - invalid. */
5734/* Opcode VEX.0F 0xb6 - invalid. */
5735/* Opcode VEX.0F 0xb7 - invalid. */
5736/* Opcode VEX.0F 0xb8 - invalid. */
5737/* Opcode VEX.0F 0xb9 - invalid. */
5738/* Opcode VEX.0F 0xba - invalid. */
5739/* Opcode VEX.0F 0xbb - invalid. */
5740/* Opcode VEX.0F 0xbc - invalid. */
5741/* Opcode VEX.0F 0xbd - invalid. */
5742/* Opcode VEX.0F 0xbe - invalid. */
5743/* Opcode VEX.0F 0xbf - invalid. */
5744
5745/* Opcode VEX.0F 0xc0 - invalid. */
5746/* Opcode VEX.66.0F 0xc0 - invalid. */
5747/* Opcode VEX.F3.0F 0xc0 - invalid. */
5748/* Opcode VEX.F2.0F 0xc0 - invalid. */
5749
5750/* Opcode VEX.0F 0xc1 - invalid. */
5751/* Opcode VEX.66.0F 0xc1 - invalid. */
5752/* Opcode VEX.F3.0F 0xc1 - invalid. */
5753/* Opcode VEX.F2.0F 0xc1 - invalid. */
5754
5755#define IEMOP_VCMPP_BODY(a_Instr) \
5756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5757 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5758 { \
5759 /* \
5760 * Register, Register. \
5761 */ \
5762 if (pVCpu->iem.s.uVexLength) \
5763 { \
5764 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5765 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5766 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5767 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5768 IEM_MC_PREPARE_AVX_USAGE(); \
5769 IEM_MC_LOCAL(X86YMMREG, uDst); \
5770 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5771 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5772 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5773 IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5774 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5775 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5776 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5777 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5778 puDst, puSrc, bImmArg); \
5779 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5780 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5781 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5782 IEM_MC_END(); \
5783 } \
5784 else \
5785 { \
5786 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5787 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5788 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5789 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5790 IEM_MC_PREPARE_AVX_USAGE(); \
5791 IEM_MC_LOCAL(X86XMMREG, uDst); \
5792 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5793 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5794 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5795 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5796 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5797 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5798 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5799 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5800 puDst, puSrc, bImmArg); \
5801 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5802 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5804 IEM_MC_END(); \
5805 } \
5806 } \
5807 else \
5808 { \
5809 /* \
5810 * Register, Memory. \
5811 */ \
5812 if (pVCpu->iem.s.uVexLength) \
5813 { \
5814 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5817 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5818 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5819 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5820 IEM_MC_PREPARE_AVX_USAGE(); \
5821 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5822 IEM_MC_LOCAL(X86YMMREG, uDst); \
5823 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5824 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5825 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5826 IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5827 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5828 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5829 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5830 puDst, puSrc, bImmArg); \
5831 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5832 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5833 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5834 IEM_MC_END(); \
5835 } \
5836 else \
5837 { \
5838 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5841 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5842 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5844 IEM_MC_PREPARE_AVX_USAGE(); \
5845 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5846 IEM_MC_LOCAL(X86XMMREG, uDst); \
5847 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5848 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5849 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5850 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5851 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5852 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5853 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5854 puDst, puSrc, bImmArg); \
5855 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); \
5856 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5857 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5858 IEM_MC_END(); \
5859 } \
5860 } \
5861 (void)0
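
/* The imm8 operand selects the comparison predicate. The VEX encoded forms accept
   predicates 0 thru 31 (vs 0 thru 7 for the legacy SSE forms); e.g. the assembler
   alias vcmpeqps ymm0, ymm1, ymm2 is simply vcmpps ymm0, ymm1, ymm2, 0 with
   predicate 0 = EQ_OQ. */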
5862
5863
5864/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
5865FNIEMOP_DEF(iemOp_vcmpps_Vps_Hps_Wps_Ib)
5866{
5867 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPS, vcmpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5868 IEMOP_VCMPP_BODY(vcmpps);
5869}
5870
5871
5872/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
5873FNIEMOP_DEF(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib)
5874{
5875 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPD, vcmppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5876 IEMOP_VCMPP_BODY(vcmppd);
5877}
5878
5879
5880/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
5881FNIEMOP_DEF(iemOp_vcmpss_Vss_Hss_Wss_Ib)
5882{
5883 IEMOP_MNEMONIC4(VEX_RVMI, VCMPSS, vcmpss, Vss, Hss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5884
5885 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5886 if (IEM_IS_MODRM_REG_MODE(bRm))
5887 {
5888 /*
5889 * XMM32, XMM32.
5890 */
5891 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5892 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5893 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5895 IEM_MC_PREPARE_AVX_USAGE();
5896 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5897 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5898 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
5899 IEM_MC_LOCAL(X86XMMREG, uDst);
5900 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5901 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5902 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5903 puDst, puSrc, bImmArg);
5904 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5905 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5906
5907 IEM_MC_ADVANCE_RIP_AND_FINISH();
5908 IEM_MC_END();
5909 }
5910 else
5911 {
5912 /*
5913 * XMM32, [mem32].
5914 */
5915 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5916
5917 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5919 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5920 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5921 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5922 IEM_MC_PREPARE_AVX_USAGE();
5923
5924 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5925 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5926 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
5927 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5928 IEM_MC_LOCAL(X86XMMREG, uDst);
5929 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5930 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5931 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5932 puDst, puSrc, bImmArg);
5933 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5934 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5935
5936 IEM_MC_ADVANCE_RIP_AND_FINISH();
5937 IEM_MC_END();
5938 }
5939}
5940
5941
5942/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
5943FNIEMOP_DEF(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib)
5944{
5945 IEMOP_MNEMONIC4(VEX_RVMI, VCMPSD, vcmpsd, Vsd, Hsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5946
5947 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5948 if (IEM_IS_MODRM_REG_MODE(bRm))
5949 {
5950 /*
5951 * XMM64, XMM64.
5952 */
5953 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5954 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5955 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5956 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5957 IEM_MC_PREPARE_AVX_USAGE();
5958 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5959 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5960 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
5961 IEM_MC_LOCAL(X86XMMREG, uDst);
5962 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5963 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5964 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5965 puDst, puSrc, bImmArg);
5966 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5967 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5968
5969 IEM_MC_ADVANCE_RIP_AND_FINISH();
5970 IEM_MC_END();
5971 }
5972 else
5973 {
5974 /*
5975 * XMM64, [mem64].
5976 */
5977 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5978
5979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5981 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5982 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5983 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5984 IEM_MC_PREPARE_AVX_USAGE();
5985
5986 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5987 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5988 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm),
5989 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5990 IEM_MC_LOCAL(X86XMMREG, uDst);
5991 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5992 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5993 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5994 puDst, puSrc, bImmArg);
5995 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5996 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
5997
5998 IEM_MC_ADVANCE_RIP_AND_FINISH();
5999 IEM_MC_END();
6000 }
6001}
6002
6003
6004/* Opcode VEX.0F 0xc3 - invalid */
6005/* Opcode VEX.66.0F 0xc3 - invalid */
6006/* Opcode VEX.F3.0F 0xc3 - invalid */
6007/* Opcode VEX.F2.0F 0xc3 - invalid */
6008
6009/* Opcode VEX.0F 0xc4 - invalid */
6010
6011
6012/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
6013FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
6014{
6015 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
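    /* Note! Only the low three bits of the immediate matter: the destination word
             index is (bImm & 7) in both the register and memory variants below. */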
6016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6017 if (IEM_IS_MODRM_REG_MODE(bRm))
6018 {
6019 /*
6020 * Register, register.
6021 */
6022 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6023 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6024 IEM_MC_LOCAL(RTUINT128U, uSrc1);
6025 IEM_MC_LOCAL(uint16_t, uValue);
6026
6027 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
6028 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6029 IEM_MC_PREPARE_AVX_USAGE();
6030
6031 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
6032 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
6033 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
6034 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
6035 IEM_MC_ADVANCE_RIP_AND_FINISH();
6036 IEM_MC_END();
6037 }
6038 else
6039 {
6040 /*
6041 * Register, memory.
6042 */
6043 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6045 IEM_MC_LOCAL(RTUINT128U, uSrc1);
6046 IEM_MC_LOCAL(uint16_t, uValue);
6047
6048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6049 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6050 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
6051 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6052 IEM_MC_PREPARE_AVX_USAGE();
6053
6054 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
6055 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6056 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
6057 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
6058 IEM_MC_ADVANCE_RIP_AND_FINISH();
6059 IEM_MC_END();
6060 }
6061}
6062
6063
6064/* Opcode VEX.F3.0F 0xc4 - invalid */
6065/* Opcode VEX.F2.0F 0xc4 - invalid */
6066
6067/* Opcode VEX.0F 0xc5 - invalid */
6068
6069
6070/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
6071FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
6072{
6073 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
6074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6075 if (IEM_IS_MODRM_REG_MODE(bRm))
6076 {
6077 /*
6078 * greg32, XMM, imm8.
6079 */
6080 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6081 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6082 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
6083 IEM_MC_LOCAL(uint16_t, uValue);
6084 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6085 IEM_MC_PREPARE_AVX_USAGE();
6086 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
6087 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
6088 IEM_MC_ADVANCE_RIP_AND_FINISH();
6089 IEM_MC_END();
6090 }
6091 /* No memory operand. */
6092 else
6093 IEMOP_RAISE_INVALID_OPCODE_RET();
6094}
6095
6096
6097/* Opcode VEX.F3.0F 0xc5 - invalid */
6098/* Opcode VEX.F2.0F 0xc5 - invalid */
6099
6100
6101#define VSHUFP_X(a_Instr) \
6102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
6103 if (IEM_IS_MODRM_REG_MODE(bRm)) \
6104 { \
6105 /* \
6106 * Register, register. \
6107 */ \
6108 if (pVCpu->iem.s.uVexLength) \
6109 { \
6110 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6111 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6112 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
6113 IEM_MC_LOCAL(RTUINT256U, uDst); \
6114 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
6115 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
6116 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
6117 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
6118 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
6119 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6120 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6121 IEM_MC_PREPARE_AVX_USAGE(); \
6122 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6123 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6124 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
6125 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6126 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
6127 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6128 IEM_MC_END(); \
6129 } \
6130 else \
6131 { \
6132 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6133 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6134 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
6135 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
6136 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
6137 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
6138 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6139 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6140 IEM_MC_PREPARE_AVX_USAGE(); \
6141 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6142 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6143 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6144 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
6145 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6146 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
6147 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6148 IEM_MC_END(); \
6149 } \
6150 } \
6151 else \
6152 { \
6153 /* \
6154 * Register, memory. \
6155 */ \
6156 if (pVCpu->iem.s.uVexLength) \
6157 { \
6158 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6159 IEM_MC_LOCAL(RTUINT256U, uDst); \
6160 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
6161 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
6162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
6163 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
6164 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
6165 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
6166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
6167 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6168 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6169 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
6170 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6171 IEM_MC_PREPARE_AVX_USAGE(); \
6172 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
6173 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6174 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
6175 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6176 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
6177 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6178 IEM_MC_END(); \
6179 } \
6180 else \
6181 { \
6182 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6183 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
6184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
6185 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
6186 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
6187 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
6188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
6189 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6190 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6191 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
6192 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6193 IEM_MC_PREPARE_AVX_USAGE(); \
6194 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
6195 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6196 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6197 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
6198 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6199 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
6200 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6201 IEM_MC_END(); \
6202 } \
6203 } \
6204 (void)0
6205
6206/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
6207FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
6208{
6209 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
6210 VSHUFP_X(vshufps);
6211}
6212
6213
6214/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6215FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
6216{
6217 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
6218 VSHUFP_X(vshufpd);
6219}
6220#undef VSHUFP_X
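
/* For reference, vshufps picks, per 128-bit lane, result dwords 0 and 1 from the
   first source (indexed by imm[1:0] and imm[3:2]) and result dwords 2 and 3 from
   the second source (indexed by imm[5:4] and imm[7:6]); vshufpd uses one immediate
   bit per result qword in the same first-source/second-source pattern. */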
6221
6222
6223/* Opcode VEX.F3.0F 0xc6 - invalid */
6224/* Opcode VEX.F2.0F 0xc6 - invalid */
6225
6226/* Opcode VEX.0F 0xc7 - invalid */
6227/* Opcode VEX.66.0F 0xc7 - invalid */
6228/* Opcode VEX.F3.0F 0xc7 - invalid */
6229/* Opcode VEX.F2.0F 0xc7 - invalid */
6230
6231/* Opcode VEX.0F 0xc8 - invalid */
6232/* Opcode VEX.0F 0xc9 - invalid */
6233/* Opcode VEX.0F 0xca - invalid */
6234/* Opcode VEX.0F 0xcb - invalid */
6235/* Opcode VEX.0F 0xcc - invalid */
6236/* Opcode VEX.0F 0xcd - invalid */
6237/* Opcode VEX.0F 0xce - invalid */
6238/* Opcode VEX.0F 0xcf - invalid */
6239
6240
6241/* Opcode VEX.0F 0xd0 - invalid */
6242
6243
6244/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
6245FNIEMOP_DEF(iemOp_vaddsubpd_Vpd_Hpd_Wpd)
6246{
6247 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPD, vaddsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6248 IEMOPMEDIAF3_INIT_VARS( vaddsubpd);
6249 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6250}
6251
6252
6253/* Opcode VEX.F3.0F 0xd0 - invalid */
6254
6255
6256/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
6257FNIEMOP_DEF(iemOp_vaddsubps_Vps_Hps_Wps)
6258{
6259 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPS, vaddsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6260 IEMOPMEDIAF3_INIT_VARS( vaddsubps);
6261 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6262}
6263
6264
6265/* Opcode VEX.0F 0xd1 - invalid */
6266
6267
6268/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
6269FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
6270{
6271 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6272 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
6273 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6274}
6275
6276/* Opcode VEX.F3.0F 0xd1 - invalid */
6277/* Opcode VEX.F2.0F 0xd1 - invalid */
6278
6279/* Opcode VEX.0F 0xd2 - invalid */
6280/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
6281FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
6282{
6283 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6284 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
6285 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6286}
6287
6288/* Opcode VEX.F3.0F 0xd2 - invalid */
6289/* Opcode VEX.F2.0F 0xd2 - invalid */
6290
6291/* Opcode VEX.0F 0xd3 - invalid */
6292/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
6293FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
6294{
6295 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6296 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
6297 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6298}
6299
6300/* Opcode VEX.F3.0F 0xd3 - invalid */
6301/* Opcode VEX.F2.0F 0xd3 - invalid */
6302
6303/* Opcode VEX.0F 0xd4 - invalid */
6304
6305
6306/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
6307FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
6308{
6309 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6310 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
6311 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6312}
6313
6314
6315/* Opcode VEX.F3.0F 0xd4 - invalid */
6316/* Opcode VEX.F2.0F 0xd4 - invalid */
6317
6318/* Opcode VEX.0F 0xd5 - invalid */
6319
6320
6321/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
6322FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
6323{
6324 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6325 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
6326 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6327}
6328
6329
6330/* Opcode VEX.F3.0F 0xd5 - invalid */
6331/* Opcode VEX.F2.0F 0xd5 - invalid */
6332
6333/* Opcode VEX.0F 0xd6 - invalid */
6334
6335/**
6336 * @opcode 0xd6
6337 * @oppfx 0x66
6338 * @opcpuid avx
6339 * @opgroup og_avx_pcksclr_datamove
6340 * @opxcpttype none
6341 * @optest op1=-1 op2=2 -> op1=2
6342 * @optest op1=0 op2=-42 -> op1=-42
6343 */
6344FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
6345{
6346 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
6347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6348 if (IEM_IS_MODRM_REG_MODE(bRm))
6349 {
6350 /*
6351 * Register, register.
6352 */
6353 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6354 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6355
6356 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6357 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6358
6359 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
6360 IEM_GET_MODRM_REG(pVCpu, bRm));
6361 IEM_MC_ADVANCE_RIP_AND_FINISH();
6362 IEM_MC_END();
6363 }
6364 else
6365 {
6366 /*
6367 * Memory, register.
6368 */
6369 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6370 IEM_MC_LOCAL(uint64_t, uSrc);
6371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6372
6373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6374 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6375 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6376 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6377
6378 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
6379 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6380
6381 IEM_MC_ADVANCE_RIP_AND_FINISH();
6382 IEM_MC_END();
6383 }
6384}
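
/* Note! The IEM_MC_*_ZX_VLMAX copy/store operators implement the VEX rule that a
         VEX.128 encoding zeroes the destination register above the written width,
         so bits 255:64 of the target YMM register end up zero here. */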
6385
6386/* Opcode VEX.F3.0F 0xd6 - invalid */
6387/* Opcode VEX.F2.0F 0xd6 - invalid */
6388
6389
6390/* Opcode VEX.0F 0xd7 - invalid */
6391
6392/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
6393FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
6394{
6395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6396 /* Docs say register only. */
6397 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
6398 {
6399 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
6400 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_AVX | DISOPTYPE_HARMLESS, 0);
6401 if (pVCpu->iem.s.uVexLength)
6402 {
6403 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6404 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
6405 IEM_MC_ARG(uint64_t *, puDst, 0);
6406 IEM_MC_LOCAL(RTUINT256U, uSrc);
6407 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
6408 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6409 IEM_MC_PREPARE_AVX_USAGE();
6410 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6411 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6412 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
6413 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
6414 IEM_MC_ADVANCE_RIP_AND_FINISH();
6415 IEM_MC_END();
6416 }
6417 else
6418 {
6419 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6420 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6421 IEM_MC_ARG(uint64_t *, puDst, 0);
6422 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6423 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6424 IEM_MC_PREPARE_AVX_USAGE();
6425 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6426 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6427 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
6428 IEM_MC_ADVANCE_RIP_AND_FINISH();
6429 IEM_MC_END();
6430 }
6431 }
6432 else
6433 IEMOP_RAISE_INVALID_OPCODE_RET();
6434}
6435
6436
6437/* Opcode VEX.F3.0F 0xd7 - invalid */
6438/* Opcode VEX.F2.0F 0xd7 - invalid */
6439
6440
6441/* Opcode VEX.0F 0xd8 - invalid */
6442
6443/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
6444FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
6445{
6446 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
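    /* Note: the integer RVM forms below all follow this pattern -
       IEMOPMEDIAOPTF3_INIT_VARS sets up static s_Host/s_Fallback implementation
       tables (128- and 256-bit workers) and the common Vx_Hx_Wx_Opt worker
       dispatches on VEX.L (cf. the non-Opt worker near the top of the file). */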
6447 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
6448 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6449}
6450
6451
6452/* Opcode VEX.F3.0F 0xd8 - invalid */
6453/* Opcode VEX.F2.0F 0xd8 - invalid */
6454
6455/* Opcode VEX.0F 0xd9 - invalid */
6456
6457
6458/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
6459FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
6460{
6461 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6462 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
6463 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6464}
6465
6466
6467/* Opcode VEX.F3.0F 0xd9 - invalid */
6468/* Opcode VEX.F2.0F 0xd9 - invalid */
6469
6470/* Opcode VEX.0F 0xda - invalid */
6471
6472
6473/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
6474FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
6475{
6476 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6477 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
6478 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6479}
6480
6481
6482/* Opcode VEX.F3.0F 0xda - invalid */
6483/* Opcode VEX.F2.0F 0xda - invalid */
6484
6485/* Opcode VEX.0F 0xdb - invalid */
6486
6487
6488/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
6489FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
6490{
6491 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
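    /* The plain logical ops (vpand, vpandn, vpor, vpxor) use prebuilt global
       implementation tables rather than per-function INIT_VARS locals. */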
6492 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6493 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
6494}
6495
6496
6497/* Opcode VEX.F3.0F 0xdb - invalid */
6498/* Opcode VEX.F2.0F 0xdb - invalid */
6499
6500/* Opcode VEX.0F 0xdc - invalid */
6501
6502
6503/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
6504FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
6505{
6506 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6507 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
6508 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6509}
6510
6511
6512/* Opcode VEX.F3.0F 0xdc - invalid */
6513/* Opcode VEX.F2.0F 0xdc - invalid */
6514
6515/* Opcode VEX.0F 0xdd - invalid */
6516
6517
6518/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
6519FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
6520{
6521 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6522 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
6523 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6524}
6525
6526
6527/* Opcode VEX.F3.0F 0xdd - invalid */
6528/* Opcode VEX.F2.0F 0xdd - invalid */
6529
6530/* Opcode VEX.0F 0xde - invalid */
6531
6532
6533/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
6534FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
6535{
6536 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6537 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
6538 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6539}
6540
6541
6542/* Opcode VEX.F3.0F 0xde - invalid */
6543/* Opcode VEX.F2.0F 0xde - invalid */
6544
6545/* Opcode VEX.0F 0xdf - invalid */
6546
6547
6548/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
6549FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
6550{
6551 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6552 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6553 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
6554}
6555
6556
6557/* Opcode VEX.F3.0F 0xdf - invalid */
6558/* Opcode VEX.F2.0F 0xdf - invalid */
6559
6560/* Opcode VEX.0F 0xe0 - invalid */
6561
6562
6563/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
6564FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
6565{
6566 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6567 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
6568 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6569}
6570
6571
6572/* Opcode VEX.F3.0F 0xe0 - invalid */
6573/* Opcode VEX.F2.0F 0xe0 - invalid */
6574
6575/* Opcode VEX.0F 0xe1 - invalid */
6576/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
6577FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
6578{
6579 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6580 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
6581 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6582}
6583
6584/* Opcode VEX.F3.0F 0xe1 - invalid */
6585/* Opcode VEX.F2.0F 0xe1 - invalid */
6586
6587/* Opcode VEX.0F 0xe2 - invalid */
6588/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
6589FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
6590{
6591 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6592 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
6593 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6594}
6595
6596/* Opcode VEX.F3.0F 0xe2 - invalid */
6597/* Opcode VEX.F2.0F 0xe2 - invalid */
6598
6599/* Opcode VEX.0F 0xe3 - invalid */
6600
6601
6602/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
6603FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
6604{
6605 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6606 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
6607 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6608}
6609
6610
6611/* Opcode VEX.F3.0F 0xe3 - invalid */
6612/* Opcode VEX.F2.0F 0xe3 - invalid */
6613
6614/* Opcode VEX.0F 0xe4 - invalid */
6615
6616
6617/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
6618FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
6619{
6620 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6621 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
6622 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6623}
6624
6625
6626/* Opcode VEX.F3.0F 0xe4 - invalid */
6627/* Opcode VEX.F2.0F 0xe4 - invalid */
6628
6629/* Opcode VEX.0F 0xe5 - invalid */
6630
6631
6632/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
6633FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
6634{
6635 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6636 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
6637 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6638}
6639
6640
6641/* Opcode VEX.F3.0F 0xe5 - invalid */
6642/* Opcode VEX.F2.0F 0xe5 - invalid */
6643
6644/* Opcode VEX.0F 0xe6 - invalid */
6645
6646
6647/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
6648FNIEMOP_DEF(iemOp_vcvttpd2dq_Vx_Wpd)
6649{
6650 IEMOP_MNEMONIC2(VEX_RM, VCVTTPD2DQ, vcvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
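    /* Narrowing conversion with truncation: 2/4 doubles become 2/4 dwords, so
       the result always fits in an XMM register and the upper lanes of the
       destination are cleared below. */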
6651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6652 if (IEM_IS_MODRM_REG_MODE(bRm))
6653 {
6654 /*
6655 * Register, register.
6656 */
6657 if (pVCpu->iem.s.uVexLength)
6658 {
6659 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6660 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6661 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6662 IEM_MC_PREPARE_AVX_USAGE();
6663
6664 IEM_MC_LOCAL( X86YMMREG, uSrc);
6665 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6666 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
6667 IEM_MC_LOCAL( X86XMMREG, uDst);
6668 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6669 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6670 iemAImpl_vcvttpd2dq_u128_u256,
6671 iemAImpl_vcvttpd2dq_u128_u256_fallback),
6672 puDst, puSrc);
6673 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6674 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6675 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6676 IEM_MC_ADVANCE_RIP_AND_FINISH();
6677 IEM_MC_END();
6678 }
6679 else
6680 {
6681 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6682 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6683 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6684 IEM_MC_PREPARE_AVX_USAGE();
6685
6686 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
6687 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6688 IEM_MC_LOCAL( X86XMMREG, uDst);
6689 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6690 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6691 iemAImpl_vcvttpd2dq_u128_u128,
6692 iemAImpl_vcvttpd2dq_u128_u128_fallback),
6693 puDst, puSrc);
6694 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6695 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6696 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6697 IEM_MC_ADVANCE_RIP_AND_FINISH();
6698 IEM_MC_END();
6699 }
6700 }
6701 else
6702 {
6703 /*
6704 * Register, memory.
6705 */
6706 if (pVCpu->iem.s.uVexLength)
6707 {
6708 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6709 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6712 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6713 IEM_MC_PREPARE_AVX_USAGE();
6714
6715 IEM_MC_LOCAL( X86YMMREG, uSrc);
6716 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
6717 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6718 IEM_MC_LOCAL( X86XMMREG, uDst);
6719 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6720 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6721 iemAImpl_vcvttpd2dq_u128_u256,
6722 iemAImpl_vcvttpd2dq_u128_u256_fallback),
6723 puDst, puSrc);
6724 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6725 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6726 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6727 IEM_MC_ADVANCE_RIP_AND_FINISH();
6728 IEM_MC_END();
6729 }
6730 else
6731 {
6732 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6735 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6736 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6737 IEM_MC_PREPARE_AVX_USAGE();
6738
6739 IEM_MC_LOCAL(X86XMMREG, uSrc);
6740 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
6741 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6742 IEM_MC_LOCAL( X86XMMREG, uDst);
6743 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6744 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6745 iemAImpl_vcvttpd2dq_u128_u128,
6746 iemAImpl_vcvttpd2dq_u128_u128_fallback),
6747 puDst, puSrc);
6748 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6749 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6750 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
6751 IEM_MC_ADVANCE_RIP_AND_FINISH();
6752 IEM_MC_END();
6753 }
6754 }
6755}
6756
6757
6758/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vpd, Wx */
6759FNIEMOP_DEF(iemOp_vcvtdq2pd_Vx_Wpd)
6760{
6761 IEMOP_MNEMONIC2(VEX_RM, VCVTDQ2PD, vcvtdq2pd, Vpd, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
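    /* Widening conversion: the VEX.128 form only reads the low two dwords of
       the source (hence the uint64_t fetch/reference below), while the VEX.256
       form turns a full XMM worth of dwords into four doubles. */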
6762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6763 if (IEM_IS_MODRM_REG_MODE(bRm))
6764 {
6765 /*
6766 * Register, register.
6767 */
6768 if (pVCpu->iem.s.uVexLength)
6769 {
6770 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6771 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6772 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6773 IEM_MC_PREPARE_AVX_USAGE();
6774
6775 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
6776 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6777 IEM_MC_LOCAL( X86YMMREG, uDst);
6778 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
6779 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6780 iemAImpl_vcvtdq2pd_u256_u128,
6781 iemAImpl_vcvtdq2pd_u256_u128_fallback),
6782 puDst, puSrc);
6783 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6784 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6785 IEM_MC_ADVANCE_RIP_AND_FINISH();
6786 IEM_MC_END();
6787 }
6788 else
6789 {
6790 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6791 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6792 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6793 IEM_MC_PREPARE_AVX_USAGE();
6794
6795 IEM_MC_ARG( const uint64_t *, pu64Src, 1);
6796 IEM_MC_REF_XREG_U64_CONST( pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
6797 IEM_MC_LOCAL( X86XMMREG, uDst);
6798 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6799 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6800 iemAImpl_vcvtdq2pd_u128_u64,
6801 iemAImpl_vcvtdq2pd_u128_u64_fallback),
6802 puDst, pu64Src);
6803 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6804 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6805 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
6806 IEM_MC_ADVANCE_RIP_AND_FINISH();
6807 IEM_MC_END();
6808 }
6809 }
6810 else
6811 {
6812 /*
6813 * Register, memory.
6814 */
6815 if (pVCpu->iem.s.uVexLength)
6816 {
6817 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6820 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6822 IEM_MC_PREPARE_AVX_USAGE();
6823
6824 IEM_MC_LOCAL(X86XMMREG, uSrc);
6825 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
6826 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6827 IEM_MC_LOCAL(X86YMMREG, uDst);
6828 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
6829 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6830 iemAImpl_vcvtdq2pd_u256_u128,
6831 iemAImpl_vcvtdq2pd_u256_u128_fallback),
6832 puDst, puSrc);
6833 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6834 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6835 IEM_MC_ADVANCE_RIP_AND_FINISH();
6836 IEM_MC_END();
6837 }
6838 else
6839 {
6840 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6842 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6843 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6844 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6845 IEM_MC_PREPARE_AVX_USAGE();
6846
6847 IEM_MC_LOCAL( uint64_t, u64Src);
6848 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
6849 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6850 IEM_MC_LOCAL( X86XMMREG, uDst);
6851 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6852 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6853 iemAImpl_vcvtdq2pd_u128_u64,
6854 iemAImpl_vcvtdq2pd_u128_u64_fallback),
6855 puDst, pu64Src);
6856 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6857 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6858 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
6859 IEM_MC_ADVANCE_RIP_AND_FINISH();
6860 IEM_MC_END();
6861 }
6862 }
6863}
6864
6865
6866/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
6867FNIEMOP_DEF(iemOp_vcvtpd2dq_Vx_Wpd)
6868{
6869 IEMOP_MNEMONIC2(VEX_RM, VCVTPD2DQ, vcvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
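    /* Same shape as vcvttpd2dq above, but rounds according to MXCSR.RC instead
       of truncating. */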
6870 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6871 if (IEM_IS_MODRM_REG_MODE(bRm))
6872 {
6873 /*
6874 * Register, register.
6875 */
6876 if (pVCpu->iem.s.uVexLength)
6877 {
6878 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6879 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6880 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6881 IEM_MC_PREPARE_AVX_USAGE();
6882
6883 IEM_MC_LOCAL( X86YMMREG, uSrc);
6884 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6885 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
6886 IEM_MC_LOCAL( X86XMMREG, uDst);
6887 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6888 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6889 iemAImpl_vcvtpd2dq_u128_u256,
6890 iemAImpl_vcvtpd2dq_u128_u256_fallback),
6891 puDst, puSrc);
6892 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6893 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6894 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6895 IEM_MC_ADVANCE_RIP_AND_FINISH();
6896 IEM_MC_END();
6897 }
6898 else
6899 {
6900 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6901 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6902 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6903 IEM_MC_PREPARE_AVX_USAGE();
6904
6905 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
6906 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6907 IEM_MC_LOCAL( X86XMMREG, uDst);
6908 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6909 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6910 iemAImpl_vcvtpd2dq_u128_u128,
6911 iemAImpl_vcvtpd2dq_u128_u128_fallback),
6912 puDst, puSrc);
6913 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6914 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6915 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6916 IEM_MC_ADVANCE_RIP_AND_FINISH();
6917 IEM_MC_END();
6918 }
6919 }
6920 else
6921 {
6922 /*
6923 * Register, memory.
6924 */
6925 if (pVCpu->iem.s.uVexLength)
6926 {
6927 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6930 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6931 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6932 IEM_MC_PREPARE_AVX_USAGE();
6933
6934 IEM_MC_LOCAL( X86YMMREG, uSrc);
6935 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
6936 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6937 IEM_MC_LOCAL( X86XMMREG, uDst);
6938 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6939 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6940 iemAImpl_vcvtpd2dq_u128_u256,
6941 iemAImpl_vcvtpd2dq_u128_u256_fallback),
6942 puDst, puSrc);
6943 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6944 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6945 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6946 IEM_MC_ADVANCE_RIP_AND_FINISH();
6947 IEM_MC_END();
6948 }
6949 else
6950 {
6951 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6954 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6955 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6956 IEM_MC_PREPARE_AVX_USAGE();
6957
6958 IEM_MC_LOCAL(X86XMMREG, uSrc);
6959 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
6960 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6961 IEM_MC_LOCAL( X86XMMREG, uDst);
6962 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6963 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6964 iemAImpl_vcvtpd2dq_u128_u128,
6965 iemAImpl_vcvtpd2dq_u128_u128_fallback),
6966 puDst, puSrc);
6967 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
6968 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6969 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
6970 IEM_MC_ADVANCE_RIP_AND_FINISH();
6971 IEM_MC_END();
6972 }
6973 }
6974}
6975
6976
6977/* Opcode VEX.0F 0xe7 - invalid */
6978
6979/**
6980 * @opcode 0xe7
6981 * @opcodesub !11 mr/reg
6982 * @oppfx 0x66
6983 * @opcpuid avx
6984 * @opgroup og_avx_cachect
6985 * @opxcpttype 1
6986 * @optest op1=-1 op2=2 -> op1=2
6987 * @optest op1=0 op2=-42 -> op1=-42
6988 */
6989FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
6990{
6991 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6992 Assert(pVCpu->iem.s.uVexLength <= 1);
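    /* The non-temporal hint is not modelled; this is an ordinary store except
       that the architectural 16/32 byte alignment check is kept. */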
6993 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6994 if (IEM_IS_MODRM_MEM_MODE(bRm))
6995 {
6996 if (pVCpu->iem.s.uVexLength == 0)
6997 {
6998 /*
6999 * 128-bit: Memory, register.
7000 */
7001 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
7002 IEM_MC_LOCAL(RTUINT128U, uSrc);
7003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7004
7005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7006 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
7007 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
7008 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
7009
7010 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
7011 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7012
7013 IEM_MC_ADVANCE_RIP_AND_FINISH();
7014 IEM_MC_END();
7015 }
7016 else
7017 {
7018 /*
7019 * 256-bit: Memory, register.
7020 */
7021 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
7022 IEM_MC_LOCAL(RTUINT256U, uSrc);
7023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7024
7025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7026 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
7027 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
7028 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
7029
7030 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
7031 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7032
7033 IEM_MC_ADVANCE_RIP_AND_FINISH();
7034 IEM_MC_END();
7035 }
7036 }
7037 /**
7038 * @opdone
7039 * @opmnemonic udvex660fe7reg
7040 * @opcode 0xe7
7041 * @opcodesub 11 mr/reg
7042 * @oppfx 0x66
7043 * @opunused immediate
7044 * @opcpuid avx
7045 * @optest ->
7046 */
7047 else
7048 IEMOP_RAISE_INVALID_OPCODE_RET();
7049}
7050
7051/* Opcode VEX.F3.0F 0xe7 - invalid */
7052/* Opcode VEX.F2.0F 0xe7 - invalid */
7053
7054
7055/* Opcode VEX.0F 0xe8 - invalid */
7056
7057
7058/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
7059FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
7060{
7061 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7062 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
7063 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7064}
7065
7066
7067/* Opcode VEX.F3.0F 0xe8 - invalid */
7068/* Opcode VEX.F2.0F 0xe8 - invalid */
7069
7070/* Opcode VEX.0F 0xe9 - invalid */
7071
7072
7073/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
7074FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
7075{
7076 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7077 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
7078 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7079}
7080
7081
7082/* Opcode VEX.F3.0F 0xe9 - invalid */
7083/* Opcode VEX.F2.0F 0xe9 - invalid */
7084
7085/* Opcode VEX.0F 0xea - invalid */
7086
7087
7088/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
7089FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
7090{
7091 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7092 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
7093 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7094}
7095
7096
7097/* Opcode VEX.F3.0F 0xea - invalid */
7098/* Opcode VEX.F2.0F 0xea - invalid */
7099
7100/* Opcode VEX.0F 0xeb - invalid */
7101
7102
7103/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
7104FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
7105{
7106 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7107 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
7108 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
7109}
7110
7111
7112
7113/* Opcode VEX.F3.0F 0xeb - invalid */
7114/* Opcode VEX.F2.0F 0xeb - invalid */
7115
7116/* Opcode VEX.0F 0xec - invalid */
7117
7118
7119/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
7120FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
7121{
7122 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7123 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
7124 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7125}
7126
7127
7128/* Opcode VEX.F3.0F 0xec - invalid */
7129/* Opcode VEX.F2.0F 0xec - invalid */
7130
7131/* Opcode VEX.0F 0xed - invalid */
7132
7133
7134/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
7135FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
7136{
7137 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7138 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
7139 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7140}
7141
7142
7143/* Opcode VEX.F3.0F 0xed - invalid */
7144/* Opcode VEX.F2.0F 0xed - invalid */
7145
7146/* Opcode VEX.0F 0xee - invalid */
7147
7148
7149/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
7150FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
7151{
7152 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7153 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
7154 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7155}
7156
7157
7158/* Opcode VEX.F3.0F 0xee - invalid */
7159/* Opcode VEX.F2.0F 0xee - invalid */
7160
7161
7162/* Opcode VEX.0F 0xef - invalid */
7163
7164
7165/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
7166FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7167{
7168 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7169 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
7170 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
7171}
7172
7173
7174/* Opcode VEX.F3.0F 0xef - invalid */
7175/* Opcode VEX.F2.0F 0xef - invalid */
7176
7177/* Opcode VEX.0F 0xf0 - invalid */
7178/* Opcode VEX.66.0F 0xf0 - invalid */
7179
7180
7181/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
7182FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
7183{
7184 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
7185 Assert(pVCpu->iem.s.uVexLength <= 1);
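    /* Architecturally vlddqu is just an unaligned load (the cacheline-split
       optimization is a hardware detail), so it is emulated as plain fetches
       without alignment checks; the register form is invalid. */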
7186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7187 if (IEM_IS_MODRM_REG_MODE(bRm))
7188 {
7189 /*
7190 * Register, register - (not implemented, assuming it raises \#UD).
7191 */
7192 IEMOP_RAISE_INVALID_OPCODE_RET();
7193 }
7194 else if (pVCpu->iem.s.uVexLength == 0)
7195 {
7196 /*
7197 * Register, memory128.
7198 */
7199 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
7200 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7202
7203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7204 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
7205 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
7206 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
7207
7208 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7209 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
7210
7211 IEM_MC_ADVANCE_RIP_AND_FINISH();
7212 IEM_MC_END();
7213 }
7214 else
7215 {
7216 /*
7217 * Register, memory256.
7218 */
7219 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
7220 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
7221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7222
7223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7224 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
7225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
7226 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
7227
7228 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7229 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
7230
7231 IEM_MC_ADVANCE_RIP_AND_FINISH();
7232 IEM_MC_END();
7233 }
7234}
7235
7236
7237/* Opcode VEX.0F 0xf1 - invalid */
7238/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
7239FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
7240{
7241 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
7242 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
7243 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7244}
7245
7246/* Opcode VEX.F2.0F 0xf1 - invalid */
7247
7248/* Opcode VEX.0F 0xf2 - invalid */
7249/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
7250FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
7251{
7252 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
7253 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
7254 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7255}
7256/* Opcode VEX.F2.0F 0xf2 - invalid */
7257
7258/* Opcode VEX.0F 0xf3 - invalid */
7259/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
7260FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
7261{
7262 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
7263 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
7264 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7265}
7266/* Opcode VEX.F2.0F 0xf3 - invalid */
7267
7268/* Opcode VEX.0F 0xf4 - invalid */
7269
7270
7271/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
7272FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
7273{
7274 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7275 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
7276 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7277}
7278
7279
7280/* Opcode VEX.F2.0F 0xf4 - invalid */
7281
7282/* Opcode VEX.0F 0xf5 - invalid */
7283
7284
7285/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
7286FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
7287{
7288 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7289 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
7290 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7291}
7292
7293
7294/* Opcode VEX.F2.0F 0xf5 - invalid */
7295
7296/* Opcode VEX.0F 0xf6 - invalid */
7297
7298
7299/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
7300FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
7301{
7302 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7303 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
7304 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7305}
7306
7307
7308/* Opcode VEX.F2.0F 0xf6 - invalid */
7309
7310/* Opcode VEX.0F 0xf7 - invalid */
7311
7312
7313/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
7314FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
7315{
7316// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
7317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7318 if (IEM_IS_MODRM_REG_MODE(bRm))
7319 {
7320 /*
7321 * XMM, XMM, (implicit) [E/R]DI
7322 */
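        /* Byte-masked store: bytes of the reg operand whose corresponding mask
           byte (r/m operand) has its MSB set are written to [rDI] (honouring
           any segment prefix).  Emulated below as a read-modify-write of the
           whole 16 bytes. */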
7323 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
7324 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
7325 IEM_MC_LOCAL( uint64_t, u64EffAddr);
7326 IEM_MC_LOCAL( RTUINT128U, u128Mem);
7327 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
7328 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
7329 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
7330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
7331 IEM_MC_PREPARE_AVX_USAGE();
7332
7333 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
7334 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
7335 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
7336 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
7337 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
7338 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
7339
7340 IEM_MC_ADVANCE_RIP_AND_FINISH();
7341 IEM_MC_END();
7342 }
7343 else
7344 {
7345 /* The memory, register encoding is invalid. */
7346 IEMOP_RAISE_INVALID_OPCODE_RET();
7347 }
7348}
7349
7350
7351/* Opcode VEX.F2.0F 0xf7 - invalid */
7352
7353/* Opcode VEX.0F 0xf8 - invalid */
7354
7355
7356/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
7357FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
7358{
7359 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7360 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
7361 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7362}
7363
7364
7365/* Opcode VEX.F2.0F 0xf8 - invalid */
7366
7367/* Opcode VEX.0F 0xf9 - invalid */
7368
7369
7370/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
7371FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
7372{
7373 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7374 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
7375 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7376}
7377
7378
7379/* Opcode VEX.F2.0F 0xf9 - invalid */
7380
7381/* Opcode VEX.0F 0xfa - invalid */
7382
7383
7384/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
7385FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
7386{
7387 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7388 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
7389 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7390}
7391
7392
7393/* Opcode VEX.F2.0F 0xfa - invalid */
7394
7395/* Opcode VEX.0F 0xfb - invalid */
7396
7397
7398/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
7399FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
7400{
7401 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7402 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
7403 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7404}
7405
7406
7407/* Opcode VEX.F2.0F 0xfb - invalid */
7408
7409/* Opcode VEX.0F 0xfc - invalid */
7410
7411
7412/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
7413FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
7414{
7415 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7416 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
7417 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7418}
7419
7420
7421/* Opcode VEX.F2.0F 0xfc - invalid */
7422
7423/* Opcode VEX.0F 0xfd - invalid */
7424
7425
7426/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
7427FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
7428{
7429 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7430 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
7431 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7432}
7433
7434
7435/* Opcode VEX.F2.0F 0xfd - invalid */
7436
7437/* Opcode VEX.0F 0xfe - invalid */
7438
7439
7440/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
7441FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
7442{
7443 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7444 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
7445 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7446}
7447
7448
7449/* Opcode VEX.F2.0F 0xfe - invalid */
7450
7451
7452/** Opcode **** 0x0f 0xff - UD0 */
7453FNIEMOP_DEF(iemOp_vud0)
7454{
7455/** @todo testcase: vud0 */
7456 IEMOP_MNEMONIC(vud0, "vud0");
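    /* Intel CPUs appear to consume a ModR/M byte (and its addressing bytes)
       for UD0 before raising #UD, whereas AMD ones do not - hence the vendor
       check below (see the testcase todo above). */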
7457 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7458 {
7459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7460 if (IEM_IS_MODRM_MEM_MODE(bRm))
7461 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
7462 }
7463 IEMOP_HLP_DONE_DECODING();
7464 IEMOP_RAISE_INVALID_OPCODE_RET();
7465}
7466
7467
7468
7469/**
7470 * VEX opcode map \#1.
7471 *
7472 * @sa g_apfnTwoByteMap
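 *
 * Four entries per opcode byte - no prefix, 066h, 0f3h and 0f2h - giving the
 * 256 * 4 = 1024 entries asserted after the table.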
7473 */
7474const PFNIEMOP g_apfnVexMap1[] =
7475{
7476 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
7477 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7478 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7479 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7480 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7481 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7482 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7483 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7484 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7485 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7486 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7487 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7488 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
7489 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7490 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7491 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7492 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7493
7494 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
7495 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
7496 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7497 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7498 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7499 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7500 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7501 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7502 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7503 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7504 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7505 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7506 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7507 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7508 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7509 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7510
7511 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7512 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7513 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7514 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7515 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7516 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7517 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7518 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7519 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7520 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7521 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7522 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7523 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7524 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7525 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7526 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7527
7528 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7529 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7530 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7531 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7532 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7533 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7534 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7535 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7536 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7537 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7538 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7539 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7540 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7541 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7542 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7543 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7544
7545 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7546 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7547 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7548 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7549 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
7550 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
7551 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
7552 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
7553 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
7554 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
7555 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
7556 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
7557 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
7558 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
7559 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
7560 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
7561
7562 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7563 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7564 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7565 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7566 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7567 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7568 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7569 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7570 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7571 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7572 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7573 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7574 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7575 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7576 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7577 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7578
7579 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7580 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7581 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7582 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7583 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7584 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7585 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7586 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7587 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7588 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7589 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7590 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7591 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7592 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7593 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7594 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7595
7596 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7597 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7598 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7599 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7600 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7601 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7602 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7603 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7604 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
7605 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
7606 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
7607 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
7608 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7609 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7610 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7611 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7612
7613 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
7614 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
7615 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
7616 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
7617 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
7618 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
7619 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
7620 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
7621 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
7622 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
7623 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
7624 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
7625 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
7626 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
7627 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
7628 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
7629
7630 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
7631 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
7632 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
7633 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
7634 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
7635 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
7636 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
7637 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
7638 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
7639 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
7640 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
7641 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
7642 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
7643 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
7644 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
7645 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
7646
7647 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7648 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7649 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7650 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7651 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7652 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7653 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7654 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7655 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7656 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7657 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
7658 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
7659 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
7660 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
7661 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
7662 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
7663
7664 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7665 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7666 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7667 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7668 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7669 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7670 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7671 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7672 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7673 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7674 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
7675 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
7676 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
7677 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
7678 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
7679 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
7680
7681 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7682 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7683 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7684 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7685 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7686 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7687 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7688 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7689 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7690 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7691 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
7692 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
7693 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
7694 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
7695 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
7696 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
7697
7698 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7699 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7700 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7701 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7702 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7703 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7704 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7705 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7706 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7707 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7708 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7709 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7710 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7711 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7712 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7713 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7714
7715 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7716 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7717 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7718 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7719 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7720 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7721 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7722 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7723 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7724 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7725 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7726 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7727 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7728 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7729 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7730 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7731
7732 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7733 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7734 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7735 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7736 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7737 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7738 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7739 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7740 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7741 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7742 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7743 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7744 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7745 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7746 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7747 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
7748};
7749AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
7750/** @} */
7751