VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@105215

Last change on this file since 105215 was 105215, checked in by vboxsync, 9 months ago

VMM/IEM: Implement vsubpd instruction emulation, bugref:9898

1/* $Id: IEMAllInstVexMap1.cpp.h 105215 2024-07-09 08:34:27Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
54 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
55 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
56 IEM_MC_PREPARE_AVX_USAGE();
57
58 IEM_MC_LOCAL(X86YMMREG, uSrc1);
59 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
60 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
61 IEM_MC_LOCAL(X86YMMREG, uSrc2);
62 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
63 IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
64 IEM_MC_LOCAL(X86YMMREG, uDst);
65 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
66 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
67 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
68 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
69 IEM_MC_ADVANCE_RIP_AND_FINISH();
70 IEM_MC_END();
71 }
72 else
73 {
74 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
75 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
76 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
77 IEM_MC_PREPARE_AVX_USAGE();
78
79 IEM_MC_LOCAL(X86XMMREG, uDst);
80 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
81 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
82 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
83 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
84 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
85 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
86 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
87 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
88 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
89 IEM_MC_ADVANCE_RIP_AND_FINISH();
90 IEM_MC_END();
91 }
92 }
93 else
94 {
95 /*
96 * Register, memory.
97 */
98 if (pVCpu->iem.s.uVexLength)
99 {
100 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
103 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
105 IEM_MC_PREPARE_AVX_USAGE();
106
107 IEM_MC_LOCAL(X86YMMREG, uSrc2);
108 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
109 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
110 IEM_MC_LOCAL(X86YMMREG, uSrc1);
111 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
112 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
113 IEM_MC_LOCAL(X86YMMREG, uDst);
114 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
115 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
116 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
117 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
118 IEM_MC_ADVANCE_RIP_AND_FINISH();
119 IEM_MC_END();
120 }
121 else
122 {
123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
126 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
127 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
128 IEM_MC_PREPARE_AVX_USAGE();
129
130 IEM_MC_LOCAL(X86XMMREG, uDst);
131 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
132 IEM_MC_LOCAL(X86XMMREG, uSrc2);
133 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
134 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
135 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
136 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
137
138 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
139 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
140 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
141 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
142 IEM_MC_ADVANCE_RIP_AND_FINISH();
143 IEM_MC_END();
144 }
145 }
146}
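/*
 * Usage sketch (assumed example, not part of revision 105215): packed
 * floating-point handlers dispatch into iemOpCommonAvxAvx2_Vx_Hx_Wx with an
 * IEMOPMEDIAF3 table.  The vaddps handler below follows the table-init /
 * dispatch pattern visible later in this file (see iemOp_vunpcklps_Vx_Hx_Wx);
 * the exact handler and table names may differ in the real file.
 */
#if 0 /* illustrative only */
FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAF3_INIT_VARS(         vaddps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}
#endif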
147
148
149/**
150 * Common worker for scalar single-precision AVX instructions on the forms (vaddss, vsubss, etc.):
151 * - vxxxss xmm0, xmm1, xmm2/mem32
152 *
153 * Exceptions type 4. AVX cpuid check for 128-bit operation.
154 * Ignores VEX.L, from SDM:
155 * Software should ensure VADDSS is encoded with VEX.L=0.
156 * Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
157 * across different processor generations.
158 */
159FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
160{
161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
162 if (IEM_IS_MODRM_REG_MODE(bRm))
163 {
164 /*
165 * Register, register.
166 */
167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
168 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
169 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
170 IEM_MC_PREPARE_AVX_USAGE();
171
172 IEM_MC_LOCAL(X86XMMREG, uDst);
173 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
174 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
175 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
176 IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
177 IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
178 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
179 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
180 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
181 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
182 IEM_MC_ADVANCE_RIP_AND_FINISH();
183 IEM_MC_END();
184 }
185 else
186 {
187 /*
188 * Register, memory.
189 */
190 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
194 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
195 IEM_MC_PREPARE_AVX_USAGE();
196
197 IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
198 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
199 IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
200 IEM_MC_LOCAL(X86XMMREG, uDst);
201 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
202 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
203 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
204 IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
205 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
206 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
207 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
208 IEM_MC_ADVANCE_RIP_AND_FINISH();
209 IEM_MC_END();
210 }
211}
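/*
 * Usage sketch (assumed example, not part of revision 105215): scalar
 * handlers pass a single u128/r32 assembly-helper pointer to
 * iemOpCommonAvx_Vx_Hx_R32.  The handler and helper names below
 * (iemAImpl_vaddss_u128_r32 and its _fallback twin) are assumptions made
 * for illustration only.
 */
#if 0 /* illustrative only */
FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
{
    IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vss, Hss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
}
#endif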
212
213
214/**
215 * Common worker for AVX2 instructions on the forms:
216 * - vpxxx xmm0, xmm1, xmm2/mem128
217 * - vpxxx ymm0, ymm1, ymm2/mem256
218 *
219 * Takes function table for function w/o implicit state parameter.
220 *
221 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
222 */
223FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
224{
225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
226 if (IEM_IS_MODRM_REG_MODE(bRm))
227 {
228 /*
229 * Register, register.
230 */
231 if (pVCpu->iem.s.uVexLength)
232 {
233 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
234 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
235 IEM_MC_LOCAL(RTUINT256U, uDst);
236 IEM_MC_LOCAL(RTUINT256U, uSrc1);
237 IEM_MC_LOCAL(RTUINT256U, uSrc2);
238 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
239 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
240 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
241 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
242 IEM_MC_PREPARE_AVX_USAGE();
243 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
244 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
245 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
246 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
247 IEM_MC_ADVANCE_RIP_AND_FINISH();
248 IEM_MC_END();
249 }
250 else
251 {
252 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
253 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
254 IEM_MC_ARG(PRTUINT128U, puDst, 0);
255 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
256 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
257 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
258 IEM_MC_PREPARE_AVX_USAGE();
259 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
260 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
261 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
262 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
263 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
264 IEM_MC_ADVANCE_RIP_AND_FINISH();
265 IEM_MC_END();
266 }
267 }
268 else
269 {
270 /*
271 * Register, memory.
272 */
273 if (pVCpu->iem.s.uVexLength)
274 {
275 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
276 IEM_MC_LOCAL(RTUINT256U, uDst);
277 IEM_MC_LOCAL(RTUINT256U, uSrc1);
278 IEM_MC_LOCAL(RTUINT256U, uSrc2);
279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
280 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
281 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
282 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
283
284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
285 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
286 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
287 IEM_MC_PREPARE_AVX_USAGE();
288
289 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
290 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
291 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
292 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
293
294 IEM_MC_ADVANCE_RIP_AND_FINISH();
295 IEM_MC_END();
296 }
297 else
298 {
299 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
300 IEM_MC_LOCAL(RTUINT128U, uSrc2);
301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
302 IEM_MC_ARG(PRTUINT128U, puDst, 0);
303 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
304 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
305
306 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
307 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
308 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
309 IEM_MC_PREPARE_AVX_USAGE();
310
311 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
312 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
313 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
314 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
315 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
316
317 IEM_MC_ADVANCE_RIP_AND_FINISH();
318 IEM_MC_END();
319 }
320 }
321}
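/*
 * Usage note: callers hand this worker an IEMOPMEDIAOPTF3 table, typically
 * built with IEMOPMEDIAOPTF3_INIT_VARS and selected with
 * IEM_SELECT_HOST_OR_FALLBACK -- see iemOp_vunpcklps_Vx_Hx_Wx further down
 * for the pattern in use, e.g. (sketch only):
 *
 *     IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 */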
322
323
324/**
325 * Common worker for AVX2 instructions on the forms:
326 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
327 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
328 *
329 * The 128-bit memory version of this instruction may elect to skip fetching the
330 * lower 64 bits of the operand. We, however, do not.
331 *
332 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
333 */
334FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
335{
336 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
337}
338
339
340/**
341 * Common worker for AVX2 instructions on the forms:
342 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
343 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
344 *
345 * The 128-bit memory version of this instruction may elect to skip fetching the
346 * higher 64 bits of the operand. We, however, do not.
347 *
348 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
349 */
350FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
351{
352 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
353}
354
355
356/**
357 * Common worker for AVX2 instructions on the forms:
358 * - vpxxx xmm0, xmm1/mem128
359 * - vpxxx ymm0, ymm1/mem256
360 *
361 * Takes function table for function w/o implicit state parameter.
362 *
363 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
364 */
365FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
366{
367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
368 if (IEM_IS_MODRM_REG_MODE(bRm))
369 {
370 /*
371 * Register, register.
372 */
373 if (pVCpu->iem.s.uVexLength)
374 {
375 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
376 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
377 IEM_MC_LOCAL(RTUINT256U, uDst);
378 IEM_MC_LOCAL(RTUINT256U, uSrc);
379 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
380 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
381 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
382 IEM_MC_PREPARE_AVX_USAGE();
383 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
384 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
385 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
386 IEM_MC_ADVANCE_RIP_AND_FINISH();
387 IEM_MC_END();
388 }
389 else
390 {
391 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
392 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
393 IEM_MC_ARG(PRTUINT128U, puDst, 0);
394 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
395 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
396 IEM_MC_PREPARE_AVX_USAGE();
397 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
398 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
399 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
400 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
401 IEM_MC_ADVANCE_RIP_AND_FINISH();
402 IEM_MC_END();
403 }
404 }
405 else
406 {
407 /*
408 * Register, memory.
409 */
410 if (pVCpu->iem.s.uVexLength)
411 {
412 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
413 IEM_MC_LOCAL(RTUINT256U, uDst);
414 IEM_MC_LOCAL(RTUINT256U, uSrc);
415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
416 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
417 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
418
419 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
420 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
421 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
422 IEM_MC_PREPARE_AVX_USAGE();
423
424 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
425 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
426 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
427
428 IEM_MC_ADVANCE_RIP_AND_FINISH();
429 IEM_MC_END();
430 }
431 else
432 {
433 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
434 IEM_MC_LOCAL(RTUINT128U, uSrc);
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
436 IEM_MC_ARG(PRTUINT128U, puDst, 0);
437 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
438
439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
440 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
441 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
442 IEM_MC_PREPARE_AVX_USAGE();
443
444 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
445 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
446 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
447 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
448
449 IEM_MC_ADVANCE_RIP_AND_FINISH();
450 IEM_MC_END();
451 }
452 }
453}
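/*
 * Usage sketch (hypothetical, not part of revision 105215): a two-operand
 * handler would dispatch here with an IEMOPMEDIAOPTF2 table.  The vpxxx
 * handler name and the IEMOPMEDIAOPTF2_INIT_VARS macro are assumed by
 * analogy with the three-operand IEMOPMEDIAOPTF3_INIT_VARS pattern used
 * elsewhere in this file.
 */
#if 0 /* illustrative only */
FNIEMOP_DEF(iemOp_vpxxx_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPXXX, vpxxx, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(      vpxxx);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
#endif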
454
455
456/* Opcode VEX.0F 0x00 - invalid */
457/* Opcode VEX.0F 0x01 - invalid */
458/* Opcode VEX.0F 0x02 - invalid */
459/* Opcode VEX.0F 0x03 - invalid */
460/* Opcode VEX.0F 0x04 - invalid */
461/* Opcode VEX.0F 0x05 - invalid */
462/* Opcode VEX.0F 0x06 - invalid */
463/* Opcode VEX.0F 0x07 - invalid */
464/* Opcode VEX.0F 0x08 - invalid */
465/* Opcode VEX.0F 0x09 - invalid */
466/* Opcode VEX.0F 0x0a - invalid */
467
468/** Opcode VEX.0F 0x0b. */
469FNIEMOP_DEF(iemOp_vud2)
470{
471 IEMOP_MNEMONIC(vud2, "vud2");
472 IEMOP_RAISE_INVALID_OPCODE_RET();
473}
474
475/* Opcode VEX.0F 0x0c - invalid */
476/* Opcode VEX.0F 0x0d - invalid */
477/* Opcode VEX.0F 0x0e - invalid */
478/* Opcode VEX.0F 0x0f - invalid */
479
480
481/**
482 * @opcode 0x10
483 * @oppfx none
484 * @opcpuid avx
485 * @opgroup og_avx_simdfp_datamove
486 * @opxcpttype 4UA
487 * @optest op1=1 op2=2 -> op1=2
488 * @optest op1=0 op2=-22 -> op1=-22
489 */
490FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
491{
492 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
493 Assert(pVCpu->iem.s.uVexLength <= 1);
494 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
495 if (IEM_IS_MODRM_REG_MODE(bRm))
496 {
497 /*
498 * Register, register.
499 */
500 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
501 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
502 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
503 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
504 if (pVCpu->iem.s.uVexLength == 0)
505 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
506 IEM_GET_MODRM_RM(pVCpu, bRm));
507 else
508 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
509 IEM_GET_MODRM_RM(pVCpu, bRm));
510 IEM_MC_ADVANCE_RIP_AND_FINISH();
511 IEM_MC_END();
512 }
513 else if (pVCpu->iem.s.uVexLength == 0)
514 {
515 /*
516 * 128-bit: Register, Memory
517 */
518 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
519 IEM_MC_LOCAL(RTUINT128U, uSrc);
520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
521
522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
523 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
524 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
525 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
526
527 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
528 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
529
530 IEM_MC_ADVANCE_RIP_AND_FINISH();
531 IEM_MC_END();
532 }
533 else
534 {
535 /*
536 * 256-bit: Register, Memory
537 */
538 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
539 IEM_MC_LOCAL(RTUINT256U, uSrc);
540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
541
542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
543 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
544 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
545 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
546
547 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
548 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
549
550 IEM_MC_ADVANCE_RIP_AND_FINISH();
551 IEM_MC_END();
552 }
553}
554
555
556/**
557 * @opcode 0x10
558 * @oppfx 0x66
559 * @opcpuid avx
560 * @opgroup og_avx_simdfp_datamove
561 * @opxcpttype 4UA
562 * @optest op1=1 op2=2 -> op1=2
563 * @optest op1=0 op2=-22 -> op1=-22
564 */
565FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
566{
567 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
568 Assert(pVCpu->iem.s.uVexLength <= 1);
569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
570 if (IEM_IS_MODRM_REG_MODE(bRm))
571 {
572 /*
573 * Register, register.
574 */
575 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
576 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
577 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
578 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
579 if (pVCpu->iem.s.uVexLength == 0)
580 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
581 IEM_GET_MODRM_RM(pVCpu, bRm));
582 else
583 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
584 IEM_GET_MODRM_RM(pVCpu, bRm));
585 IEM_MC_ADVANCE_RIP_AND_FINISH();
586 IEM_MC_END();
587 }
588 else if (pVCpu->iem.s.uVexLength == 0)
589 {
590 /*
591 * 128-bit: Register, memory.
592 */
593 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
594 IEM_MC_LOCAL(RTUINT128U, uSrc);
595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
596
597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
598 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
599 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
600 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
601
602 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
603 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
604
605 IEM_MC_ADVANCE_RIP_AND_FINISH();
606 IEM_MC_END();
607 }
608 else
609 {
610 /*
611 * 256-bit: Register, memory.
612 */
613 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
614 IEM_MC_LOCAL(RTUINT256U, uSrc);
615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
616
617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
618 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
619 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
620 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
621
622 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
623 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
624
625 IEM_MC_ADVANCE_RIP_AND_FINISH();
626 IEM_MC_END();
627 }
628}
629
630
631FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
632{
633 Assert(pVCpu->iem.s.uVexLength <= 1);
634 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
635 if (IEM_IS_MODRM_REG_MODE(bRm))
636 {
637 /**
638 * @opcode 0x10
639 * @oppfx 0xf3
640 * @opcodesub 11 mr/reg
641 * @opcpuid avx
642 * @opgroup og_avx_simdfp_datamerge
643 * @opxcpttype 5
644 * @optest op1=1 op2=0 op3=2 -> op1=2
645 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
646 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
647 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
648 * @note HssHi refers to bits 127:32.
649 */
650 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
651 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
652 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
653 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
654 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
655 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
656 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
657 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
658 IEM_MC_ADVANCE_RIP_AND_FINISH();
659 IEM_MC_END();
660 }
661 else
662 {
663 /**
664 * @opdone
665 * @opcode 0x10
666 * @oppfx 0xf3
667 * @opcodesub !11 mr/reg
668 * @opcpuid avx
669 * @opgroup og_avx_simdfp_datamove
670 * @opxcpttype 5
671 * @opfunction iemOp_vmovss_Vss_Hss_Wss
672 * @optest op1=1 op2=2 -> op1=2
673 * @optest op1=0 op2=-22 -> op1=-22
674 */
675 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
676 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
677 IEM_MC_LOCAL(uint32_t, uSrc);
678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
679
680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
681 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
682 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
683 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
684
685 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
686 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
687
688 IEM_MC_ADVANCE_RIP_AND_FINISH();
689 IEM_MC_END();
690 }
691}
692
693
694FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
695{
696 Assert(pVCpu->iem.s.uVexLength <= 1);
697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
698 if (IEM_IS_MODRM_REG_MODE(bRm))
699 {
700 /**
701 * @opcode 0x10
702 * @oppfx 0xf2
703 * @opcodesub 11 mr/reg
704 * @opcpuid avx
705 * @opgroup og_avx_simdfp_datamerge
706 * @opxcpttype 5
707 * @optest op1=1 op2=0 op3=2 -> op1=2
708 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
709 * @optest op1=3 op2=-1 op3=0x77 ->
710 * op1=0xffffffffffffffff0000000000000077
711 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
712 */
713 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
714 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
715 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
716
717 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
718 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
719 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
720 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
721 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
722 IEM_MC_ADVANCE_RIP_AND_FINISH();
723 IEM_MC_END();
724 }
725 else
726 {
727 /**
728 * @opdone
729 * @opcode 0x10
730 * @oppfx 0xf2
731 * @opcodesub !11 mr/reg
732 * @opcpuid avx
733 * @opgroup og_avx_simdfp_datamove
734 * @opxcpttype 5
735 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
736 * @optest op1=1 op2=2 -> op1=2
737 * @optest op1=0 op2=-22 -> op1=-22
738 */
739 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
740 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
741 IEM_MC_LOCAL(uint64_t, uSrc);
742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
743
744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
745 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
746 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
747 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
748
749 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
750 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
751
752 IEM_MC_ADVANCE_RIP_AND_FINISH();
753 IEM_MC_END();
754 }
755}
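/*
 * Semantics sketch for the scalar VEX.F3/F2 0x10 moves above (per the SDM,
 * illustration only):
 *
 *     vmovsd xmm1, xmm2, xmm3:   xmm1[63:0]    = xmm3[63:0]
 *                                xmm1[127:64]  = xmm2[127:64]
 *                                ymm1[255:128] = 0
 *     vmovsd xmm1, [mem64]:      xmm1[63:0]    = mem64
 *                                ymm1[255:64]  = 0
 *
 * vmovss behaves the same way with 32-bit elements: a merge with the
 * VEX.vvvv operand in the register form, a zero-extending load in the
 * memory form.
 */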
756
757
758/**
759 * @opcode 0x11
760 * @oppfx none
761 * @opcpuid avx
762 * @opgroup og_avx_simdfp_datamove
763 * @opxcpttype 4UA
764 * @optest op1=1 op2=2 -> op1=2
765 * @optest op1=0 op2=-22 -> op1=-22
766 */
767FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
768{
769 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
770 Assert(pVCpu->iem.s.uVexLength <= 1);
771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
772 if (IEM_IS_MODRM_REG_MODE(bRm))
773 {
774 /*
775 * Register, register.
776 */
777 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
778 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
779 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
780 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
781 if (pVCpu->iem.s.uVexLength == 0)
782 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
783 IEM_GET_MODRM_REG(pVCpu, bRm));
784 else
785 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
786 IEM_GET_MODRM_REG(pVCpu, bRm));
787 IEM_MC_ADVANCE_RIP_AND_FINISH();
788 IEM_MC_END();
789 }
790 else if (pVCpu->iem.s.uVexLength == 0)
791 {
792 /*
793 * 128-bit: Memory, register.
794 */
795 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
796 IEM_MC_LOCAL(RTUINT128U, uSrc);
797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
798
799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
800 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
801 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
802 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
803
804 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
805 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
806
807 IEM_MC_ADVANCE_RIP_AND_FINISH();
808 IEM_MC_END();
809 }
810 else
811 {
812 /*
813 * 256-bit: Memory, register.
814 */
815 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
816 IEM_MC_LOCAL(RTUINT256U, uSrc);
817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
818
819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
820 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
822 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
823
824 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
825 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
826
827 IEM_MC_ADVANCE_RIP_AND_FINISH();
828 IEM_MC_END();
829 }
830}
831
832
833/**
834 * @opcode 0x11
835 * @oppfx 0x66
836 * @opcpuid avx
837 * @opgroup og_avx_simdfp_datamove
838 * @opxcpttype 4UA
839 * @optest op1=1 op2=2 -> op1=2
840 * @optest op1=0 op2=-22 -> op1=-22
841 */
842FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
843{
844 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
845 Assert(pVCpu->iem.s.uVexLength <= 1);
846 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
847 if (IEM_IS_MODRM_REG_MODE(bRm))
848 {
849 /*
850 * Register, register.
851 */
852 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
853 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
854 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
855 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
856 if (pVCpu->iem.s.uVexLength == 0)
857 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
858 IEM_GET_MODRM_REG(pVCpu, bRm));
859 else
860 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
861 IEM_GET_MODRM_REG(pVCpu, bRm));
862 IEM_MC_ADVANCE_RIP_AND_FINISH();
863 IEM_MC_END();
864 }
865 else if (pVCpu->iem.s.uVexLength == 0)
866 {
867 /*
868 * 128-bit: Memory, register.
869 */
870 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
871 IEM_MC_LOCAL(RTUINT128U, uSrc);
872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
873
874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
875 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
876 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
877 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
878
879 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
880 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
881
882 IEM_MC_ADVANCE_RIP_AND_FINISH();
883 IEM_MC_END();
884 }
885 else
886 {
887 /*
888 * 256-bit: Memory, register.
889 */
890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
891 IEM_MC_LOCAL(RTUINT256U, uSrc);
892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
893
894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
895 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
896 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
897 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
898
899 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
900 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
901
902 IEM_MC_ADVANCE_RIP_AND_FINISH();
903 IEM_MC_END();
904 }
905}
906
907
908FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
909{
910 Assert(pVCpu->iem.s.uVexLength <= 1);
911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
912 if (IEM_IS_MODRM_REG_MODE(bRm))
913 {
914 /**
915 * @opcode 0x11
916 * @oppfx 0xf3
917 * @opcodesub 11 mr/reg
918 * @opcpuid avx
919 * @opgroup og_avx_simdfp_datamerge
920 * @opxcpttype 5
921 * @optest op1=1 op2=0 op3=2 -> op1=2
922 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
923 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
924 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
925 */
926 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
927 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
928 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
929
930 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
931 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
932 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
933 IEM_GET_MODRM_REG(pVCpu, bRm),
934 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
935 IEM_MC_ADVANCE_RIP_AND_FINISH();
936 IEM_MC_END();
937 }
938 else
939 {
940 /**
941 * @opdone
942 * @opcode 0x11
943 * @oppfx 0xf3
944 * @opcodesub !11 mr/reg
945 * @opcpuid avx
946 * @opgroup og_avx_simdfp_datamove
947 * @opxcpttype 5
948 * @opfunction iemOp_vmovss_Wss_Hss_Vss
949 * @optest op1=1 op2=2 -> op1=2
950 * @optest op1=0 op2=-22 -> op1=-22
951 */
952 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
953 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
954 IEM_MC_LOCAL(uint32_t, uSrc);
955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
956
957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
958 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
959 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
960 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
961
962 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
963 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
964
965 IEM_MC_ADVANCE_RIP_AND_FINISH();
966 IEM_MC_END();
967 }
968}
969
970
971FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
972{
973 Assert(pVCpu->iem.s.uVexLength <= 1);
974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
975 if (IEM_IS_MODRM_REG_MODE(bRm))
976 {
977 /**
978 * @opcode 0x11
979 * @oppfx 0xf2
980 * @opcodesub 11 mr/reg
981 * @opcpuid avx
982 * @opgroup og_avx_simdfp_datamerge
983 * @opxcpttype 5
984 * @optest op1=1 op2=0 op3=2 -> op1=2
985 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
986 * @optest op1=3 op2=-1 op3=0x77 ->
987 * op1=0xffffffffffffffff0000000000000077
988 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
989 */
990 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
991 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
992 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
993
994 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
995 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
996 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
997 IEM_GET_MODRM_REG(pVCpu, bRm),
998 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
999 IEM_MC_ADVANCE_RIP_AND_FINISH();
1000 IEM_MC_END();
1001 }
1002 else
1003 {
1004 /**
1005 * @opdone
1006 * @opcode 0x11
1007 * @oppfx 0xf2
1008 * @opcodesub !11 mr/reg
1009 * @opcpuid avx
1010 * @opgroup og_avx_simdfp_datamove
1011 * @opxcpttype 5
1012 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
1013 * @optest op1=1 op2=2 -> op1=2
1014 * @optest op1=0 op2=-22 -> op1=-22
1015 */
1016 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
1017 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1018 IEM_MC_LOCAL(uint64_t, uSrc);
1019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1020
1021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1022 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1023 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1024 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1025
1026 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1027 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1028
1029 IEM_MC_ADVANCE_RIP_AND_FINISH();
1030 IEM_MC_END();
1031 }
1032}
1033
1034
1035FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
1036{
1037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1038 if (IEM_IS_MODRM_REG_MODE(bRm))
1039 {
1040 /**
1041 * @opcode 0x12
1042 * @opcodesub 11 mr/reg
1043 * @oppfx none
1044 * @opcpuid avx
1045 * @opgroup og_avx_simdfp_datamerge
1046 * @opxcpttype 7LZ
1047 * @optest op2=0x2200220122022203
1048 * op3=0x3304330533063307
1049 * -> op1=0x22002201220222033304330533063307
1050 * @optest op2=-1 op3=-42 -> op1=-42
1051 * @note op3 and op2 are only the 8-byte high XMM register halves.
1052 */
1053 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1054 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1055 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1056
1057 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1058 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1059 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1060 IEM_GET_MODRM_RM(pVCpu, bRm),
1061 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1062
1063 IEM_MC_ADVANCE_RIP_AND_FINISH();
1064 IEM_MC_END();
1065 }
1066 else
1067 {
1068 /**
1069 * @opdone
1070 * @opcode 0x12
1071 * @opcodesub !11 mr/reg
1072 * @oppfx none
1073 * @opcpuid avx
1074 * @opgroup og_avx_simdfp_datamove
1075 * @opxcpttype 5LZ
1076 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1077 * @optest op1=1 op2=0 op3=0 -> op1=0
1078 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1079 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1080 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1081 */
1082 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1083
1084 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1085 IEM_MC_LOCAL(uint64_t, uSrc);
1086 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1087
1088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1089 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1090 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1091 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1092
1093 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1094 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1095 uSrc,
1096 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1097
1098 IEM_MC_ADVANCE_RIP_AND_FINISH();
1099 IEM_MC_END();
1100 }
1101}
1102
1103
1104/**
1105 * @opcode 0x12
1106 * @opcodesub !11 mr/reg
1107 * @oppfx 0x66
1108 * @opcpuid avx
1109 * @opgroup og_avx_pcksclr_datamerge
1110 * @opxcpttype 5LZ
1111 * @optest op2=0 op3=2 -> op1=2
1112 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1113 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1114 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1115 */
1116FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1117{
1118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1119 if (IEM_IS_MODRM_MEM_MODE(bRm))
1120 {
1121 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1122
1123 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1124 IEM_MC_LOCAL(uint64_t, uSrc);
1125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1126
1127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1128 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1129 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1130 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1131
1132 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1133 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1134 uSrc,
1135 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1136
1137 IEM_MC_ADVANCE_RIP_AND_FINISH();
1138 IEM_MC_END();
1139 }
1140
1141 /**
1142 * @opdone
1143 * @opmnemonic udvex660f12m3
1144 * @opcode 0x12
1145 * @opcodesub 11 mr/reg
1146 * @oppfx 0x66
1147 * @opunused immediate
1148 * @opcpuid avx
1149 * @optest ->
1150 */
1151 else
1152 IEMOP_RAISE_INVALID_OPCODE_RET();
1153}
1154
1155
1156/**
1157 * @opcode 0x12
1158 * @oppfx 0xf3
1159 * @opcpuid avx
1160 * @opgroup og_avx_pcksclr_datamove
1161 * @opxcpttype 4
1162 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1163 * -> op1=0x00000002000000020000000100000001
1164 * @optest vex.l==1 /
1165 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1166 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1167 */
1168FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1169{
1170 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1171 Assert(pVCpu->iem.s.uVexLength <= 1);
1172 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1173 if (IEM_IS_MODRM_REG_MODE(bRm))
1174 {
1175 /*
1176 * Register, register.
1177 */
1178 if (pVCpu->iem.s.uVexLength == 0)
1179 {
1180 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1181 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1182 IEM_MC_LOCAL(RTUINT128U, uSrc);
1183
1184 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1185 IEM_MC_PREPARE_AVX_USAGE();
1186
1187 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1188 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1189 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1190 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1191 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1192 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1193
1194 IEM_MC_ADVANCE_RIP_AND_FINISH();
1195 IEM_MC_END();
1196 }
1197 else
1198 {
1199 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1200 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1201 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1202 IEM_MC_PREPARE_AVX_USAGE();
1203
1204 IEM_MC_LOCAL(RTUINT256U, uSrc);
1205 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1206 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1207 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1208 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1209 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1210 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1211 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1212 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1213 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1214 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1215
1216 IEM_MC_ADVANCE_RIP_AND_FINISH();
1217 IEM_MC_END();
1218 }
1219 }
1220 else
1221 {
1222 /*
1223 * Register, memory.
1224 */
1225 if (pVCpu->iem.s.uVexLength == 0)
1226 {
1227 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1228 IEM_MC_LOCAL(RTUINT128U, uSrc);
1229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1230
1231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1232 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1233 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1234 IEM_MC_PREPARE_AVX_USAGE();
1235
1236 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1237 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1238 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1239 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1240 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1241 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1242
1243 IEM_MC_ADVANCE_RIP_AND_FINISH();
1244 IEM_MC_END();
1245 }
1246 else
1247 {
1248 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1251 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1252 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1253 IEM_MC_PREPARE_AVX_USAGE();
1254
1255 IEM_MC_LOCAL(RTUINT256U, uSrc);
1256 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1257
1258 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1259 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1260 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1261 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1262 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1263 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1264 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1265 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1266 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1267
1268 IEM_MC_ADVANCE_RIP_AND_FINISH();
1269 IEM_MC_END();
1270 }
1271 }
1272}
1273
1274
1275/**
1276 * @opcode 0x12
1277 * @oppfx 0xf2
1278 * @opcpuid avx
1279 * @opgroup og_avx_pcksclr_datamove
1280 * @opxcpttype 5
1281 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1282 * -> op1=0x22222222111111112222222211111111
1283 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1284 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1285 */
1286FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1287{
1288 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1290 if (IEM_IS_MODRM_REG_MODE(bRm))
1291 {
1292 /*
1293 * Register, register.
1294 */
1295 if (pVCpu->iem.s.uVexLength == 0)
1296 {
1297 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1298 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1299 IEM_MC_LOCAL(uint64_t, uSrc);
1300
1301 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1302 IEM_MC_PREPARE_AVX_USAGE();
1303
1304 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1305 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1306 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1307 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1308
1309 IEM_MC_ADVANCE_RIP_AND_FINISH();
1310 IEM_MC_END();
1311 }
1312 else
1313 {
1314 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1315 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1316 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1317 IEM_MC_PREPARE_AVX_USAGE();
1318
1319 IEM_MC_LOCAL(uint64_t, uSrc1);
1320 IEM_MC_LOCAL(uint64_t, uSrc2);
1321 IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1322 IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);
1323
1324 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
1325 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
1326 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
1327 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
1328 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1329
1330 IEM_MC_ADVANCE_RIP_AND_FINISH();
1331 IEM_MC_END();
1332 }
1333 }
1334 else
1335 {
1336 /*
1337 * Register, memory.
1338 */
1339 if (pVCpu->iem.s.uVexLength == 0)
1340 {
1341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1343 IEM_MC_LOCAL(uint64_t, uSrc);
1344
1345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1346 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1348 IEM_MC_PREPARE_AVX_USAGE();
1349
1350 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1351 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1352 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1353 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1354
1355 IEM_MC_ADVANCE_RIP_AND_FINISH();
1356 IEM_MC_END();
1357 }
1358 else
1359 {
1360 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1362
1363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1364 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1365 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1366 IEM_MC_PREPARE_AVX_USAGE();
1367
1368 IEM_MC_LOCAL(RTUINT256U, uSrc);
1369 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1370
1371 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1372 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1373 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1374 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1375 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1376
1377 IEM_MC_ADVANCE_RIP_AND_FINISH();
1378 IEM_MC_END();
1379 }
1380 }
1381}
1382
1383
1384/**
1385 * @opcode 0x13
1386 * @opcodesub !11 mr/reg
1387 * @oppfx none
1388 * @opcpuid avx
1389 * @opgroup og_avx_simdfp_datamove
1390 * @opxcpttype 5
1391 * @optest op1=1 op2=2 -> op1=2
1392 * @optest op1=0 op2=-42 -> op1=-42
1393 */
1394FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1395{
1396 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1397 if (IEM_IS_MODRM_MEM_MODE(bRm))
1398 {
1399 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1400
1401 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1402 IEM_MC_LOCAL(uint64_t, uSrc);
1403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1404
1405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1406 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1407 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1408 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1409
1410 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1411 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1412
1413 IEM_MC_ADVANCE_RIP_AND_FINISH();
1414 IEM_MC_END();
1415 }
1416
1417 /**
1418 * @opdone
1419 * @opmnemonic udvex0f13m3
1420 * @opcode 0x13
1421 * @opcodesub 11 mr/reg
1422 * @oppfx none
1423 * @opunused immediate
1424 * @opcpuid avx
1425 * @optest ->
1426 */
1427 else
1428 IEMOP_RAISE_INVALID_OPCODE_RET();
1429}
1430
1431
1432/**
1433 * @opcode 0x13
1434 * @opcodesub !11 mr/reg
1435 * @oppfx 0x66
1436 * @opcpuid avx
1437 * @opgroup og_avx_pcksclr_datamove
1438 * @opxcpttype 5
1439 * @optest op1=1 op2=2 -> op1=2
1440 * @optest op1=0 op2=-42 -> op1=-42
1441 */
1442FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1443{
1444 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1445 if (IEM_IS_MODRM_MEM_MODE(bRm))
1446 {
1447 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1448 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1449 IEM_MC_LOCAL(uint64_t, uSrc);
1450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1451
1452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1453 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1454 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1455 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1456
1457 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1458 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1459
1460 IEM_MC_ADVANCE_RIP_AND_FINISH();
1461 IEM_MC_END();
1462 }
1463
1464 /**
1465 * @opdone
1466 * @opmnemonic udvex660f13m3
1467 * @opcode 0x13
1468 * @opcodesub 11 mr/reg
1469 * @oppfx 0x66
1470 * @opunused immediate
1471 * @opcpuid avx
1472 * @optest ->
1473 */
1474 else
1475 IEMOP_RAISE_INVALID_OPCODE_RET();
1476}
1477
1478/* Opcode VEX.F3.0F 0x13 - invalid */
1479/* Opcode VEX.F2.0F 0x13 - invalid */
1480
1481/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1482FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1483{
1484 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1485 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1486 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1487}
1488
1489
1490/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1491FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1492{
1493 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1494 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1495 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1496}
1497
1498
1499/* Opcode VEX.F3.0F 0x14 - invalid */
1500/* Opcode VEX.F2.0F 0x14 - invalid */
1501
1502
1503/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1504FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1505{
1506 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1507 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1508 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1509}
1510
1511
1512/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1513FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1514{
1515 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1516 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1517 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1518}
1519
1520
1521/* Opcode VEX.F3.0F 0x15 - invalid */
1522/* Opcode VEX.F2.0F 0x15 - invalid */
1523
1524
1525FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1526{
1527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1528 if (IEM_IS_MODRM_REG_MODE(bRm))
1529 {
1530 /**
1531 * @opcode 0x16
1532 * @opcodesub 11 mr/reg
1533 * @oppfx none
1534 * @opcpuid avx
1535 * @opgroup og_avx_simdfp_datamerge
1536 * @opxcpttype 7LZ
1537 */
1538 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1539
1540 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1541 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1542
1543 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1544 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1545 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1546 IEM_GET_MODRM_RM(pVCpu, bRm),
1547 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1548
1549 IEM_MC_ADVANCE_RIP_AND_FINISH();
1550 IEM_MC_END();
1551 }
1552 else
1553 {
1554 /**
1555 * @opdone
1556 * @opcode 0x16
1557 * @opcodesub !11 mr/reg
1558 * @oppfx none
1559 * @opcpuid avx
1560 * @opgroup og_avx_simdfp_datamove
1561 * @opxcpttype 5LZ
1562 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1563 */
1564 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1565
1566 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1567 IEM_MC_LOCAL(uint64_t, uSrc);
1568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1569
1570 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1571 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1572 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1573 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1574
1575 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1576 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1577 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1578 uSrc);
1579
1580 IEM_MC_ADVANCE_RIP_AND_FINISH();
1581 IEM_MC_END();
1582 }
1583}
1584
1585
1586/**
1587 * @opcode 0x16
1588 * @opcodesub !11 mr/reg
1589 * @oppfx 0x66
1590 * @opcpuid avx
1591 * @opgroup og_avx_pcksclr_datamerge
1592 * @opxcpttype 5LZ
1593 */
1594FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1595{
1596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1597 if (IEM_IS_MODRM_MEM_MODE(bRm))
1598 {
1599 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1600
1601 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1602 IEM_MC_LOCAL(uint64_t, uSrc);
1603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1604
1605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1606 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1607 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1608 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1609
1610 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1611 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1612 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1613 uSrc);
1614
1615 IEM_MC_ADVANCE_RIP_AND_FINISH();
1616 IEM_MC_END();
1617 }
1618
1619 /**
1620 * @opdone
1621 * @opmnemonic udvex660f16m3
1622 * @opcode 0x16
1623 * @opcodesub 11 mr/reg
1624 * @oppfx 0x66
1625 * @opunused immediate
1626 * @opcpuid avx
1627 * @optest ->
1628 */
1629 else
1630 IEMOP_RAISE_INVALID_OPCODE_RET();
1631}
1632
1633
1634/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1635/**
1636 * @opcode 0x16
1637 * @oppfx 0xf3
1638 * @opcpuid avx
1639 * @opgroup og_avx_pcksclr_datamove
1640 * @opxcpttype 4
1641 */
1642FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1643{
1644 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1645 Assert(pVCpu->iem.s.uVexLength <= 1);
1646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1647 if (IEM_IS_MODRM_REG_MODE(bRm))
1648 {
1649 /*
1650 * Register, register.
1651 */
1652 if (pVCpu->iem.s.uVexLength == 0)
1653 {
1654 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1655 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1656 IEM_MC_LOCAL(RTUINT128U, uSrc);
1657
1658 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1659 IEM_MC_PREPARE_AVX_USAGE();
1660
1661 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1662 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1663 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1664 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1665 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1666 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1667
1668 IEM_MC_ADVANCE_RIP_AND_FINISH();
1669 IEM_MC_END();
1670 }
1671 else
1672 {
1673 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1674 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1675 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1676 IEM_MC_PREPARE_AVX_USAGE();
1677
1678 IEM_MC_LOCAL(RTUINT256U, uSrc);
1679 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1680 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1681 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1682 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1683 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1684 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1685 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1686 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1687 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1688 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1689
1690 IEM_MC_ADVANCE_RIP_AND_FINISH();
1691 IEM_MC_END();
1692 }
1693 }
1694 else
1695 {
1696 /*
1697 * Register, memory.
1698 */
1699 if (pVCpu->iem.s.uVexLength == 0)
1700 {
1701 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1702 IEM_MC_LOCAL(RTUINT128U, uSrc);
1703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1704
1705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1706 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1707 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1708 IEM_MC_PREPARE_AVX_USAGE();
1709
1710 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1711 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1712 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1713 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1714 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1715 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1716
1717 IEM_MC_ADVANCE_RIP_AND_FINISH();
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1725 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1726 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1727 IEM_MC_PREPARE_AVX_USAGE();
1728
1729 IEM_MC_LOCAL(RTUINT256U, uSrc);
1730 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1731
1732 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1733 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1734 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1735 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1736 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1737 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1738 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1739 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1740 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1741
1742 IEM_MC_ADVANCE_RIP_AND_FINISH();
1743 IEM_MC_END();
1744 }
1745 }
1746}
1747
1748
1749/* Opcode VEX.F2.0F 0x16 - invalid */
1750
1751
1752/**
1753 * @opcode 0x17
1754 * @opcodesub !11 mr/reg
1755 * @oppfx none
1756 * @opcpuid avx
1757 * @opgroup og_avx_simdfp_datamove
1758 * @opxcpttype 5
1759 */
1760FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1761{
1762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1763 if (IEM_IS_MODRM_MEM_MODE(bRm))
1764 {
1765 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1766
1767 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1768 IEM_MC_LOCAL(uint64_t, uSrc);
1769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1770
1771 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1772 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1773 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1774 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1775
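 /* vmovhps (store form) writes the high qword (bits 127:64) of the XMM register to memory. */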
1776 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1777 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1778
1779 IEM_MC_ADVANCE_RIP_AND_FINISH();
1780 IEM_MC_END();
1781 }
1782
1783 /**
1784 * @opdone
1785 * @opmnemonic udvex0f17m3
1786 * @opcode 0x17
1787 * @opcodesub 11 mr/reg
1788 * @oppfx none
1789 * @opunused immediate
1790 * @opcpuid avx
1791 * @optest ->
1792 */
1793 else
1794 IEMOP_RAISE_INVALID_OPCODE_RET();
1795}
1796
1797
1798/**
1799 * @opcode 0x17
1800 * @opcodesub !11 mr/reg
1801 * @oppfx 0x66
1802 * @opcpuid avx
1803 * @opgroup og_avx_pcksclr_datamove
1804 * @opxcpttype 5
1805 */
1806FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1807{
1808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1809 if (IEM_IS_MODRM_MEM_MODE(bRm))
1810 {
1811 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1812
1813 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1814 IEM_MC_LOCAL(uint64_t, uSrc);
1815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1816
1817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1818 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1819 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1820 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1821
1822 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1823 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1824
1825 IEM_MC_ADVANCE_RIP_AND_FINISH();
1826 IEM_MC_END();
1827 }
1828
1829 /**
1830 * @opdone
1831 * @opmnemonic udvex660f17m3
1832 * @opcode 0x17
1833 * @opcodesub 11 mr/reg
1834 * @oppfx 0x66
1835 * @opunused immediate
1836 * @opcpuid avx
1837 * @optest ->
1838 */
1839 else
1840 IEMOP_RAISE_INVALID_OPCODE_RET();
1841}
1842
1843
1844/* Opcode VEX.F3.0F 0x17 - invalid */
1845/* Opcode VEX.F2.0F 0x17 - invalid */
1846
1847
1848/* Opcode VEX.0F 0x18 - invalid */
1849/* Opcode VEX.0F 0x19 - invalid */
1850/* Opcode VEX.0F 0x1a - invalid */
1851/* Opcode VEX.0F 0x1b - invalid */
1852/* Opcode VEX.0F 0x1c - invalid */
1853/* Opcode VEX.0F 0x1d - invalid */
1854/* Opcode VEX.0F 0x1e - invalid */
1855/* Opcode VEX.0F 0x1f - invalid */
1856
1857/* Opcode VEX.0F 0x20 - invalid */
1858/* Opcode VEX.0F 0x21 - invalid */
1859/* Opcode VEX.0F 0x22 - invalid */
1860/* Opcode VEX.0F 0x23 - invalid */
1861/* Opcode VEX.0F 0x24 - invalid */
1862/* Opcode VEX.0F 0x25 - invalid */
1863/* Opcode VEX.0F 0x26 - invalid */
1864/* Opcode VEX.0F 0x27 - invalid */
1865
1866/**
1867 * @opcode 0x28
1868 * @oppfx none
1869 * @opcpuid avx
1870 * @opgroup og_avx_pcksclr_datamove
1871 * @opxcpttype 1
1872 * @optest op1=1 op2=2 -> op1=2
1873 * @optest op1=0 op2=-42 -> op1=-42
1874 * @note Almost identical to vmovapd.
1875 */
1876FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1877{
1878 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1880 Assert(pVCpu->iem.s.uVexLength <= 1);
1881 if (IEM_IS_MODRM_REG_MODE(bRm))
1882 {
1883 /*
1884 * Register, register.
1885 */
1886 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1887 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1888
1889 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1890 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1891 if (pVCpu->iem.s.uVexLength == 0)
1892 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1893 IEM_GET_MODRM_RM(pVCpu, bRm));
1894 else
1895 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1896 IEM_GET_MODRM_RM(pVCpu, bRm));
1897 IEM_MC_ADVANCE_RIP_AND_FINISH();
1898 IEM_MC_END();
1899 }
1900 else
1901 {
1902 /*
1903 * Register, memory.
1904 */
1905 if (pVCpu->iem.s.uVexLength == 0)
1906 {
1907 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1909 IEM_MC_LOCAL(RTUINT128U, uSrc);
1910
1911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1912 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1913 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1914 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1915
1916 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1917 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1918
1919 IEM_MC_ADVANCE_RIP_AND_FINISH();
1920 IEM_MC_END();
1921 }
1922 else
1923 {
1924 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1926 IEM_MC_LOCAL(RTUINT256U, uSrc);
1927
1928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1929 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1930 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1931 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1932
1933 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1934 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1935
1936 IEM_MC_ADVANCE_RIP_AND_FINISH();
1937 IEM_MC_END();
1938 }
1939 }
1940}
1941
1942
1943/**
1944 * @opcode 0x28
1945 * @oppfx 66
1946 * @opcpuid avx
1947 * @opgroup og_avx_pcksclr_datamove
1948 * @opxcpttype 1
1949 * @optest op1=1 op2=2 -> op1=2
1950 * @optest op1=0 op2=-42 -> op1=-42
1951 * @note Almost identical to vmovaps
1952 */
1953FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1954{
1955 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1957 Assert(pVCpu->iem.s.uVexLength <= 1);
1958 if (IEM_IS_MODRM_REG_MODE(bRm))
1959 {
1960 /*
1961 * Register, register.
1962 */
1963 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1964 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1965
1966 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1967 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1968 if (pVCpu->iem.s.uVexLength == 0)
1969 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1970 IEM_GET_MODRM_RM(pVCpu, bRm));
1971 else
1972 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1973 IEM_GET_MODRM_RM(pVCpu, bRm));
1974 IEM_MC_ADVANCE_RIP_AND_FINISH();
1975 IEM_MC_END();
1976 }
1977 else
1978 {
1979 /*
1980 * Register, memory.
1981 */
1982 if (pVCpu->iem.s.uVexLength == 0)
1983 {
1984 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1986 IEM_MC_LOCAL(RTUINT128U, uSrc);
1987
1988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1989 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1990 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1991 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1992
1993 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1994 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1995
1996 IEM_MC_ADVANCE_RIP_AND_FINISH();
1997 IEM_MC_END();
1998 }
1999 else
2000 {
2001 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2003 IEM_MC_LOCAL(RTUINT256U, uSrc);
2004
2005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2006 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2007 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2008 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2009
2010 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2011 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2012
2013 IEM_MC_ADVANCE_RIP_AND_FINISH();
2014 IEM_MC_END();
2015 }
2016 }
2017}
2018
2019/**
2020 * @opmnemonic udvexf30f28
2021 * @opcode 0x28
2022 * @oppfx 0xf3
2023 * @opunused vex.modrm
2024 * @opcpuid avx
2025 * @optest ->
2026 * @opdone
2027 */
2028
2029/**
2030 * @opmnemonic udvexf20f28
2031 * @opcode 0x28
2032 * @oppfx 0xf2
2033 * @opunused vex.modrm
2034 * @opcpuid avx
2035 * @optest ->
2036 * @opdone
2037 */
2038
2039/**
2040 * @opcode 0x29
2041 * @oppfx none
2042 * @opcpuid avx
2043 * @opgroup og_avx_pcksclr_datamove
2044 * @opxcpttype 1
2045 * @optest op1=1 op2=2 -> op1=2
2046 * @optest op1=0 op2=-42 -> op1=-42
2047 * @note Almost identical to vmovapd.
2048 */
2049FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2050{
2051 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2053 Assert(pVCpu->iem.s.uVexLength <= 1);
2054 if (IEM_IS_MODRM_REG_MODE(bRm))
2055 {
2056 /*
2057 * Register, register.
2058 */
2059 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2060 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2061
2062 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2063 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2064 if (pVCpu->iem.s.uVexLength == 0)
2065 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2066 IEM_GET_MODRM_REG(pVCpu, bRm));
2067 else
2068 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2069 IEM_GET_MODRM_REG(pVCpu, bRm));
2070 IEM_MC_ADVANCE_RIP_AND_FINISH();
2071 IEM_MC_END();
2072 }
2073 else
2074 {
2075 /*
2076 * Register, memory.
2077 */
2078 if (pVCpu->iem.s.uVexLength == 0)
2079 {
2080 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2082 IEM_MC_LOCAL(RTUINT128U, uSrc);
2083
2084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2085 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2086 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2087 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2088
2089 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2090 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2091
2092 IEM_MC_ADVANCE_RIP_AND_FINISH();
2093 IEM_MC_END();
2094 }
2095 else
2096 {
2097 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2099 IEM_MC_LOCAL(RTUINT256U, uSrc);
2100
2101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2102 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2103 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2104 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2105
2106 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2107 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2108
2109 IEM_MC_ADVANCE_RIP_AND_FINISH();
2110 IEM_MC_END();
2111 }
2112 }
2113}
2114
2115/**
2116 * @opcode 0x29
2117 * @oppfx 66
2118 * @opcpuid avx
2119 * @opgroup og_avx_pcksclr_datamove
2120 * @opxcpttype 1
2121 * @optest op1=1 op2=2 -> op1=2
2122 * @optest op1=0 op2=-42 -> op1=-42
2123 * @note Almost identical to vmovaps
2124 */
2125FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2126{
2127 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2128 Assert(pVCpu->iem.s.uVexLength <= 1);
2129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2130 if (IEM_IS_MODRM_REG_MODE(bRm))
2131 {
2132 /*
2133 * Register, register.
2134 */
2135 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2136 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2137
2138 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2139 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2140 if (pVCpu->iem.s.uVexLength == 0)
2141 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2142 IEM_GET_MODRM_REG(pVCpu, bRm));
2143 else
2144 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2145 IEM_GET_MODRM_REG(pVCpu, bRm));
2146 IEM_MC_ADVANCE_RIP_AND_FINISH();
2147 IEM_MC_END();
2148 }
2149 else
2150 {
2151 /*
2152 * Register, memory.
2153 */
2154 if (pVCpu->iem.s.uVexLength == 0)
2155 {
2156 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2158 IEM_MC_LOCAL(RTUINT128U, uSrc);
2159
2160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2161 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2162 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2163 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2164
2165 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2166 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2167
2168 IEM_MC_ADVANCE_RIP_AND_FINISH();
2169 IEM_MC_END();
2170 }
2171 else
2172 {
2173 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2175 IEM_MC_LOCAL(RTUINT256U, uSrc);
2176
2177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2178 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2179 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2180 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2181
2182 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2183 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2184
2185 IEM_MC_ADVANCE_RIP_AND_FINISH();
2186 IEM_MC_END();
2187 }
2188 }
2189}
2190
2191
2192/**
2193 * @opmnemonic udvexf30f29
2194 * @opcode 0x29
2195 * @oppfx 0xf3
2196 * @opunused vex.modrm
2197 * @opcpuid avx
2198 * @optest ->
2199 * @opdone
2200 */
2201
2202/**
2203 * @opmnemonic udvexf20f29
2204 * @opcode 0x29
2205 * @oppfx 0xf2
2206 * @opunused vex.modrm
2207 * @opcpuid avx
2208 * @optest ->
2209 * @opdone
2210 */
2211
2212
2213/** Opcode VEX.0F 0x2a - invalid */
2214/** Opcode VEX.66.0F 0x2a - invalid */
2215/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2216FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2217/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2218FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2219
2220
2221/**
2222 * @opcode 0x2b
2223 * @opcodesub !11 mr/reg
2224 * @oppfx none
2225 * @opcpuid avx
2226 * @opgroup og_avx_cachect
2227 * @opxcpttype 1
2228 * @optest op1=1 op2=2 -> op1=2
2229 * @optest op1=0 op2=-42 -> op1=-42
2230 * @note Identical implementation to vmovntpd
2231 */
2232FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2233{
2234 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
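 /* Non-temporal store hint: minimizes cache pollution; the memory operand must be aligned, hence the _ALIGN_ store helpers below. */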
2235 Assert(pVCpu->iem.s.uVexLength <= 1);
2236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2237 if (IEM_IS_MODRM_MEM_MODE(bRm))
2238 {
2239 /*
2240 * memory, register.
2241 */
2242 if (pVCpu->iem.s.uVexLength == 0)
2243 {
2244 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2245 IEM_MC_LOCAL(RTUINT128U, uSrc);
2246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2247
2248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2249 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2250 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2251 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2252
2253 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2254 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2255
2256 IEM_MC_ADVANCE_RIP_AND_FINISH();
2257 IEM_MC_END();
2258 }
2259 else
2260 {
2261 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2262 IEM_MC_LOCAL(RTUINT256U, uSrc);
2263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2264
2265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2266 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2267 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2268 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2269
2270 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2271 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2272
2273 IEM_MC_ADVANCE_RIP_AND_FINISH();
2274 IEM_MC_END();
2275 }
2276 }
2277 /* The register, register encoding is invalid. */
2278 else
2279 IEMOP_RAISE_INVALID_OPCODE_RET();
2280}
2281
2282/**
2283 * @opcode 0x2b
2284 * @opcodesub !11 mr/reg
2285 * @oppfx 0x66
2286 * @opcpuid avx
2287 * @opgroup og_avx_cachect
2288 * @opxcpttype 1
2289 * @optest op1=1 op2=2 -> op1=2
2290 * @optest op1=0 op2=-42 -> op1=-42
2291 * @note Identical implementation to vmovntps
2292 */
2293FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2294{
2295 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2296 Assert(pVCpu->iem.s.uVexLength <= 1);
2297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2298 if (IEM_IS_MODRM_MEM_MODE(bRm))
2299 {
2300 /*
2301 * memory, register.
2302 */
2303 if (pVCpu->iem.s.uVexLength == 0)
2304 {
2305 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2306 IEM_MC_LOCAL(RTUINT128U, uSrc);
2307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2308
2309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2310 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2311 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2312 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2313
2314 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2315 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2316
2317 IEM_MC_ADVANCE_RIP_AND_FINISH();
2318 IEM_MC_END();
2319 }
2320 else
2321 {
2322 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2323 IEM_MC_LOCAL(RTUINT256U, uSrc);
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2328 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2329 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2330
2331 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2332 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2333
2334 IEM_MC_ADVANCE_RIP_AND_FINISH();
2335 IEM_MC_END();
2336 }
2337 }
2338 /* The register, register encoding is invalid. */
2339 else
2340 IEMOP_RAISE_INVALID_OPCODE_RET();
2341}
2342
2343/**
2344 * @opmnemonic udvexf30f2b
2345 * @opcode 0x2b
2346 * @oppfx 0xf3
2347 * @opunused vex.modrm
2348 * @opcpuid avx
2349 * @optest ->
2350 * @opdone
2351 */
2352
2353/**
2354 * @opmnemonic udvexf20f2b
2355 * @opcode 0x2b
2356 * @oppfx 0xf2
2357 * @opunused vex.modrm
2358 * @opcpuid avx
2359 * @optest ->
2360 * @opdone
2361 */
2362
2363
2364/* Opcode VEX.0F 0x2c - invalid */
2365/* Opcode VEX.66.0F 0x2c - invalid */
2366/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2367FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2368/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2369FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2370
2371/* Opcode VEX.0F 0x2d - invalid */
2372/* Opcode VEX.66.0F 0x2d - invalid */
2373/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2374FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2375/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2376FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2377
2378
2379/**
2380 * @opcode 0x2e
2381 * @oppfx none
2382 * @opflmodify cf,pf,af,zf,sf,of
2383 * @opflclear af,sf,of
2384 */
2385FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2386{
2387 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
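 /* Unordered compare of the low single-precision values; ZF/PF/CF receive the result and AF/SF/OF are cleared. */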
2388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2389 if (IEM_IS_MODRM_REG_MODE(bRm))
2390 {
2391 /*
2392 * Register, register.
2393 */
2394 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2395 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2396 IEM_MC_LOCAL(uint32_t, fEFlags);
2397 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2398 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2399 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2400 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2401 IEM_MC_PREPARE_AVX_USAGE();
2402 IEM_MC_FETCH_EFLAGS(fEFlags);
2403 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2404 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2405 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2406 pEFlags, uSrc1, uSrc2);
2407 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2408 IEM_MC_COMMIT_EFLAGS(fEFlags);
2409
2410 IEM_MC_ADVANCE_RIP_AND_FINISH();
2411 IEM_MC_END();
2412 }
2413 else
2414 {
2415 /*
2416 * Register, memory.
2417 */
2418 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2419 IEM_MC_LOCAL(uint32_t, fEFlags);
2420 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2421 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2422 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2424
2425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2426 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2427 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2428 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2429
2430 IEM_MC_PREPARE_AVX_USAGE();
2431 IEM_MC_FETCH_EFLAGS(fEFlags);
2432 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2433 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2434 pEFlags, uSrc1, uSrc2);
2435 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2436 IEM_MC_COMMIT_EFLAGS(fEFlags);
2437
2438 IEM_MC_ADVANCE_RIP_AND_FINISH();
2439 IEM_MC_END();
2440 }
2441}
2442
2443
2444/**
2445 * @opcode 0x2e
2446 * @oppfx 0x66
2447 * @opflmodify cf,pf,af,zf,sf,of
2448 * @opflclear af,sf,of
2449 */
2450FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2451{
2452 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2454 if (IEM_IS_MODRM_REG_MODE(bRm))
2455 {
2456 /*
2457 * Register, register.
2458 */
2459 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2460 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2461 IEM_MC_LOCAL(uint32_t, fEFlags);
2462 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2463 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2464 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2465 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2466 IEM_MC_PREPARE_AVX_USAGE();
2467 IEM_MC_FETCH_EFLAGS(fEFlags);
2468 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2469 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2470 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2471 pEFlags, uSrc1, uSrc2);
2472 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2473 IEM_MC_COMMIT_EFLAGS(fEFlags);
2474
2475 IEM_MC_ADVANCE_RIP_AND_FINISH();
2476 IEM_MC_END();
2477 }
2478 else
2479 {
2480 /*
2481 * Register, memory.
2482 */
2483 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2484 IEM_MC_LOCAL(uint32_t, fEFlags);
2485 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2486 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2487 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2489
2490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2491 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2492 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2493 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2494
2495 IEM_MC_PREPARE_AVX_USAGE();
2496 IEM_MC_FETCH_EFLAGS(fEFlags);
2497 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2498 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2499 pEFlags, uSrc1, uSrc2);
2500 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2501 IEM_MC_COMMIT_EFLAGS(fEFlags);
2502
2503 IEM_MC_ADVANCE_RIP_AND_FINISH();
2504 IEM_MC_END();
2505 }
2506}
2507
2508
2509/* Opcode VEX.F3.0F 0x2e - invalid */
2510/* Opcode VEX.F2.0F 0x2e - invalid */
2511
2512/**
2513 * @opcode 0x2f
2514 * @oppfx none
2515 * @opflmodify cf,pf,af,zf,sf,of
2516 * @opflclear af,sf,of
2517 */
2518FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2519{
2520 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2522 if (IEM_IS_MODRM_REG_MODE(bRm))
2523 {
2524 /*
2525 * Register, register.
2526 */
2527 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2528 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2529 IEM_MC_LOCAL(uint32_t, fEFlags);
2530 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2531 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2532 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2533 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2534 IEM_MC_PREPARE_AVX_USAGE();
2535 IEM_MC_FETCH_EFLAGS(fEFlags);
2536 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2537 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2538 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2539 pEFlags, uSrc1, uSrc2);
2540 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2541 IEM_MC_COMMIT_EFLAGS(fEFlags);
2542
2543 IEM_MC_ADVANCE_RIP_AND_FINISH();
2544 IEM_MC_END();
2545 }
2546 else
2547 {
2548 /*
2549 * Register, memory.
2550 */
2551 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2552 IEM_MC_LOCAL(uint32_t, fEFlags);
2553 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2554 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2555 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2557
2558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2559 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2560 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2561 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2562
2563 IEM_MC_PREPARE_AVX_USAGE();
2564 IEM_MC_FETCH_EFLAGS(fEFlags);
2565 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2566 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2567 pEFlags, uSrc1, uSrc2);
2568 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2569 IEM_MC_COMMIT_EFLAGS(fEFlags);
2570
2571 IEM_MC_ADVANCE_RIP_AND_FINISH();
2572 IEM_MC_END();
2573 }
2574}
2575
2576
2577/**
2578 * @opcode 0x2f
2579 * @oppfx 0x66
2580 * @opflmodify cf,pf,af,zf,sf,of
2581 * @opflclear af,sf,of
2582 */
2583FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2584{
2585 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2587 if (IEM_IS_MODRM_REG_MODE(bRm))
2588 {
2589 /*
2590 * Register, register.
2591 */
2592 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2593 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2594 IEM_MC_LOCAL(uint32_t, fEFlags);
2595 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2596 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2597 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2598 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2599 IEM_MC_PREPARE_AVX_USAGE();
2600 IEM_MC_FETCH_EFLAGS(fEFlags);
2601 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2602 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2603 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2604 pEFlags, uSrc1, uSrc2);
2605 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2606 IEM_MC_COMMIT_EFLAGS(fEFlags);
2607
2608 IEM_MC_ADVANCE_RIP_AND_FINISH();
2609 IEM_MC_END();
2610 }
2611 else
2612 {
2613 /*
2614 * Register, memory.
2615 */
2616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2617 IEM_MC_LOCAL(uint32_t, fEFlags);
2618 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2619 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2620 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2622
2623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2624 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2625 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2626 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2627
2628 IEM_MC_PREPARE_AVX_USAGE();
2629 IEM_MC_FETCH_EFLAGS(fEFlags);
2630 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2631 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2632 pEFlags, uSrc1, uSrc2);
2633 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2634 IEM_MC_COMMIT_EFLAGS(fEFlags);
2635
2636 IEM_MC_ADVANCE_RIP_AND_FINISH();
2637 IEM_MC_END();
2638 }
2639}
2640
2641
2642/* Opcode VEX.F3.0F 0x2f - invalid */
2643/* Opcode VEX.F2.0F 0x2f - invalid */
2644
2645/* Opcode VEX.0F 0x30 - invalid */
2646/* Opcode VEX.0F 0x31 - invalid */
2647/* Opcode VEX.0F 0x32 - invalid */
2648/* Opcode VEX.0F 0x33 - invalid */
2649/* Opcode VEX.0F 0x34 - invalid */
2650/* Opcode VEX.0F 0x35 - invalid */
2651/* Opcode VEX.0F 0x36 - invalid */
2652/* Opcode VEX.0F 0x37 - invalid */
2653/* Opcode VEX.0F 0x38 - invalid */
2654/* Opcode VEX.0F 0x39 - invalid */
2655/* Opcode VEX.0F 0x3a - invalid */
2656/* Opcode VEX.0F 0x3b - invalid */
2657/* Opcode VEX.0F 0x3c - invalid */
2658/* Opcode VEX.0F 0x3d - invalid */
2659/* Opcode VEX.0F 0x3e - invalid */
2660/* Opcode VEX.0F 0x3f - invalid */
2661/* Opcode VEX.0F 0x40 - invalid */
2662/* Opcode VEX.0F 0x41 - invalid */
2663/* Opcode VEX.0F 0x42 - invalid */
2664/* Opcode VEX.0F 0x43 - invalid */
2665/* Opcode VEX.0F 0x44 - invalid */
2666/* Opcode VEX.0F 0x45 - invalid */
2667/* Opcode VEX.0F 0x46 - invalid */
2668/* Opcode VEX.0F 0x47 - invalid */
2669/* Opcode VEX.0F 0x48 - invalid */
2670/* Opcode VEX.0F 0x49 - invalid */
2671/* Opcode VEX.0F 0x4a - invalid */
2672/* Opcode VEX.0F 0x4b - invalid */
2673/* Opcode VEX.0F 0x4c - invalid */
2674/* Opcode VEX.0F 0x4d - invalid */
2675/* Opcode VEX.0F 0x4e - invalid */
2676/* Opcode VEX.0F 0x4f - invalid */
2677
2678
2679/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2680FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2681{
2682 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
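 /* vmovmskps collects the sign bit of each packed single into the low bits of the destination GPR (4 bits for XMM, 8 for YMM) and zeroes the rest. */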
2683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2684 if (IEM_IS_MODRM_REG_MODE(bRm))
2685 {
2686 /*
2687 * Register, register.
2688 */
2689 if (pVCpu->iem.s.uVexLength == 0)
2690 {
2691 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2692 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2693 IEM_MC_LOCAL(uint8_t, u8Dst);
2694 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2695 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2696 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2697 IEM_MC_PREPARE_AVX_USAGE();
2698 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2699 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2700 pu8Dst, puSrc);
2701 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2702 IEM_MC_ADVANCE_RIP_AND_FINISH();
2703 IEM_MC_END();
2704 }
2705 else
2706 {
2707 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2708 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2709 IEM_MC_LOCAL(uint8_t, u8Dst);
2710 IEM_MC_LOCAL(RTUINT256U, uSrc);
2711 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2712 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2713
2714 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2715 IEM_MC_PREPARE_AVX_USAGE();
2716 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2717 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2718 pu8Dst, puSrc);
2719 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2720 IEM_MC_ADVANCE_RIP_AND_FINISH();
2721 IEM_MC_END();
2722 }
2723 }
2724 /* No memory operand. */
2725 else
2726 IEMOP_RAISE_INVALID_OPCODE_RET();
2727}
2728
2729
2730/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2731FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2732{
2733 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2734 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2735 if (IEM_IS_MODRM_REG_MODE(bRm))
2736 {
2737 /*
2738 * Register, register.
2739 */
2740 if (pVCpu->iem.s.uVexLength == 0)
2741 {
2742 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2743 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2744 IEM_MC_LOCAL(uint8_t, u8Dst);
2745 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2746 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2747 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2748 IEM_MC_PREPARE_AVX_USAGE();
2749 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2750 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2751 pu8Dst, puSrc);
2752 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2753 IEM_MC_ADVANCE_RIP_AND_FINISH();
2754 IEM_MC_END();
2755 }
2756 else
2757 {
2758 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2759 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2760 IEM_MC_LOCAL(uint8_t, u8Dst);
2761 IEM_MC_LOCAL(RTUINT256U, uSrc);
2762 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2763 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2764
2765 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2766 IEM_MC_PREPARE_AVX_USAGE();
2767 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2768 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2769 pu8Dst, puSrc);
2770 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2771 IEM_MC_ADVANCE_RIP_AND_FINISH();
2772 IEM_MC_END();
2773 }
2774 }
2775 /* No memory operand. */
2776 else
2777 IEMOP_RAISE_INVALID_OPCODE_RET();
2778}
2779
2780
2781/* Opcode VEX.F3.0F 0x50 - invalid */
2782/* Opcode VEX.F2.0F 0x50 - invalid */
2783
2784/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2785FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2786/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2787FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2788/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2789FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2790/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2791FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2792
2793/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2794FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2795/* Opcode VEX.66.0F 0x52 - invalid */
2796/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2797FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2798/* Opcode VEX.F2.0F 0x52 - invalid */
2799
2800/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2801FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2802/* Opcode VEX.66.0F 0x53 - invalid */
2803/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2804FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2805/* Opcode VEX.F2.0F 0x53 - invalid */
2806
2807
2808/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2809FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2810{
2811 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
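 /* Bitwise AND is lane-agnostic, so the integer vpand worker doubles as the vandps implementation. */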
2812 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2813 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2814}
2815
2816
2817/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2818FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2819{
2820 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2821 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2822 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2823}
2824
2825
2826/* Opcode VEX.F3.0F 0x54 - invalid */
2827/* Opcode VEX.F2.0F 0x54 - invalid */
2828
2829
2830/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2831FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2832{
2833 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2834 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2835 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2836}
2837
2838
2839/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2840FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2841{
2842 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2843 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2844 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2845}
2846
2847
2848/* Opcode VEX.F3.0F 0x55 - invalid */
2849/* Opcode VEX.F2.0F 0x55 - invalid */
2850
2851/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2852FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2853{
2854 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2855 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2856 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2857}
2858
2859
2860/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2861FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2862{
2863 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2864 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2865 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2866}
2867
2868
2869/* Opcode VEX.F3.0F 0x56 - invalid */
2870/* Opcode VEX.F2.0F 0x56 - invalid */
2871
2872
2873/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2874FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2875{
2876 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2877 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2878 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2879}
2880
2881
2882/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2883FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2884{
2885 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2886 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2887 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2888}
2889
2890
2891/* Opcode VEX.F3.0F 0x57 - invalid */
2892/* Opcode VEX.F2.0F 0x57 - invalid */
2893
2894
2895/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2896FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
2897{
2898 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2899 IEMOPMEDIAF3_INIT_VARS( vaddps);
2900 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2901}
2902
2903
2904/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2905FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
2906{
2907 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2908 IEMOPMEDIAF3_INIT_VARS( vaddpd);
2909 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2910}
2911
2912
2913/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2914FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
2915{
2916 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2917 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
2918 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
2919}
2920
2921
2922/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2923FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2924
2925/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2926FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2927/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2928FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2929/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2930FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2931/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2932FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2933
2934/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2935FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2936/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2937FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2938/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2939FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2940/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2941FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2942
2943/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2944FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2945/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2946FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2947/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2948FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2949/* Opcode VEX.F2.0F 0x5b - invalid */
2950
2951
2952/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2953FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
2954{
2955 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2956 IEMOPMEDIAF3_INIT_VARS( vsubps);
2957 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2958}
2959
2960
2961/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2962FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
2963{
2964 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2965 IEMOPMEDIAF3_INIT_VARS( vsubpd);
2966 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2967}
2968
2969
2970/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2971FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2972/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2973FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2974
2975/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2976FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2977/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2978FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2979/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2980FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2981/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2982FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2983
2984/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2985FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2986/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2987FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2988/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2989FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2990/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2991FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2992
2993/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2994FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2995/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2996FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2997/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2998FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2999/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
3000FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
3001
3002
3003/* Opcode VEX.0F 0x60 - invalid */
3004
3005
3006/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
3007FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
3008{
3009 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3010 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
3011 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3012}
3013
3014
3015/* Opcode VEX.F3.0F 0x60 - invalid */
3016
3017
3018/* Opcode VEX.0F 0x61 - invalid */
3019
3020
3021/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
3022FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
3023{
3024 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3025 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
3026 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3027}
3028
3029
3030/* Opcode VEX.F3.0F 0x61 - invalid */
3031
3032
3033/* Opcode VEX.0F 0x62 - invalid */
3034
3035/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
3036FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
3037{
3038 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3039 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
3040 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3041}
3042
3043
3044/* Opcode VEX.F3.0F 0x62 - invalid */
3045
3046
3047
3048/* Opcode VEX.0F 0x63 - invalid */
3049
3050
3051/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
3052FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
3053{
3054 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3055 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
3056 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3057}
3058
3059
3060/* Opcode VEX.F3.0F 0x63 - invalid */
3061
3062/* Opcode VEX.0F 0x64 - invalid */
3063
3064
3065/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
3066FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
3067{
3068 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3069 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
3070 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3071}
3072
3073
3074/* Opcode VEX.F3.0F 0x64 - invalid */
3075
3076/* Opcode VEX.0F 0x65 - invalid */
3077
3078
3079/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3080FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3081{
3082 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3083 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
3084 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3085}
3086
3087
3088/* Opcode VEX.F3.0F 0x65 - invalid */
3089
3090/* Opcode VEX.0F 0x66 - invalid */
3091
3092
3093/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3094FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3095{
3096 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3097 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3098 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3099}
3100
3101
3102/* Opcode VEX.F3.0F 0x66 - invalid */
3103
3104/* Opcode VEX.0F 0x67 - invalid */
3105
3106
3107/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, W */
3108FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3109{
3110 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3111 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3112 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3113}
3114
3115
3116/* Opcode VEX.F3.0F 0x67 - invalid */
3117
3118
3119///**
3120// * Common worker for SSE2 instructions on the form:
3121// * pxxxx xmm1, xmm2/mem128
3122// *
3123// * The 2nd operand is the second half of a register, which in the memory case
3124// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3125// * where it may read the full 128 bits or only the upper 64 bits.
3126// *
3127// * Exceptions type 4.
3128// */
3129//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3130//{
3131// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3132// if (IEM_IS_MODRM_REG_MODE(bRm))
3133// {
3134// /*
3135// * Register, register.
3136// */
3137// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3138// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3139// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3140// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3141// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3142// IEM_MC_PREPARE_SSE_USAGE();
3143// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3144// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3145// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3146// IEM_MC_ADVANCE_RIP_AND_FINISH();
3147// IEM_MC_END();
3148// }
3149// else
3150// {
3151// /*
3152// * Register, memory.
3153// */
3154// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3155// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3156// IEM_MC_LOCAL(RTUINT128U, uSrc);
3157// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3158// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3159//
3160// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3161// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3162// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3163// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3164//
3165// IEM_MC_PREPARE_SSE_USAGE();
3166// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3167// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3168//
3169// IEM_MC_ADVANCE_RIP_AND_FINISH();
3170// IEM_MC_END();
3171// }
3172// return VINF_SUCCESS;
3173//}
3174
3175
3176/* Opcode VEX.0F 0x68 - invalid */
3177
3178/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3179FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3180{
3181 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3182 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3183 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3184}
3185
3186
3187/* Opcode VEX.F3.0F 0x68 - invalid */
3188
3189
3190/* Opcode VEX.0F 0x69 - invalid */
3191
3192
3193/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3194FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3195{
3196 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3197 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3198 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3199}
3200
3201
3202/* Opcode VEX.F3.0F 0x69 - invalid */
3203
3204
3205/* Opcode VEX.0F 0x6a - invalid */
3206
3207
3208/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, W */
3209FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3210{
3211 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3212 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3213 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3214}
3215
3216
3217/* Opcode VEX.F3.0F 0x6a - invalid */
3218
3219
3220/* Opcode VEX.0F 0x6b - invalid */
3221
3222
3223/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3224FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3225{
3226 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3227 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3228 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3229}
3230
3231
3232/* Opcode VEX.F3.0F 0x6b - invalid */
3233
3234
3235/* Opcode VEX.0F 0x6c - invalid */
3236
3237
3238/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3239FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3240{
3241 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3242 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3243 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3244}
3245
3246
3247/* Opcode VEX.F3.0F 0x6c - invalid */
3248/* Opcode VEX.F2.0F 0x6c - invalid */
3249
3250
3251/* Opcode VEX.0F 0x6d - invalid */
3252
3253
3254/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, W */
3255FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3256{
3257 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3258 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3259 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3260}
3261
3262
3263/* Opcode VEX.F3.0F 0x6d - invalid */
3264
3265
3266/* Opcode VEX.0F 0x6e - invalid */
3267
3268FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3269{
3270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
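 /* VEX.W=1 selects the 64-bit VMOVQ form, VEX.W=0 the 32-bit VMOVD form. */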
3271 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3272 {
3273 /**
3274 * @opcode 0x6e
3275 * @opcodesub rex.w=1
3276 * @oppfx 0x66
3277 * @opcpuid avx
3278 * @opgroup og_avx_simdint_datamov
3279 * @opxcpttype 5
3280 * @optest 64-bit / op1=1 op2=2 -> op1=2
3281 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3282 */
3283 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3284 if (IEM_IS_MODRM_REG_MODE(bRm))
3285 {
3286 /* XMM, greg64 */
3287 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3288 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3289 IEM_MC_LOCAL(uint64_t, u64Tmp);
3290
3291 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3292 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3293
3294 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3295 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3296
3297 IEM_MC_ADVANCE_RIP_AND_FINISH();
3298 IEM_MC_END();
3299 }
3300 else
3301 {
3302 /* XMM, [mem64] */
3303 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3304 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3305 IEM_MC_LOCAL(uint64_t, u64Tmp);
3306
3307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3308 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3309 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3310 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3311
3312 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3313 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3314
3315 IEM_MC_ADVANCE_RIP_AND_FINISH();
3316 IEM_MC_END();
3317 }
3318 }
3319 else
3320 {
3321 /**
3322 * @opdone
3323 * @opcode 0x6e
3324 * @opcodesub rex.w=0
3325 * @oppfx 0x66
3326 * @opcpuid avx
3327 * @opgroup og_avx_simdint_datamov
3328 * @opxcpttype 5
3329 * @opfunction iemOp_vmovd_q_Vy_Ey
3330 * @optest op1=1 op2=2 -> op1=2
3331 * @optest op1=0 op2=-42 -> op1=-42
3332 */
3333 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3334 if (IEM_IS_MODRM_REG_MODE(bRm))
3335 {
3336 /* XMM, greg32 */
3337 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3338 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3339 IEM_MC_LOCAL(uint32_t, u32Tmp);
3340
3341 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3342 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3343
3344 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3345 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3346
3347 IEM_MC_ADVANCE_RIP_AND_FINISH();
3348 IEM_MC_END();
3349 }
3350 else
3351 {
3352 /* XMM, [mem32] */
3353 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3355 IEM_MC_LOCAL(uint32_t, u32Tmp);
3356
3357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3358 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3359 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3360 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3361
3362 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3363 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3364
3365 IEM_MC_ADVANCE_RIP_AND_FINISH();
3366 IEM_MC_END();
3367 }
3368 }
3369}
3370
3371
3372/* Opcode VEX.F3.0F 0x6e - invalid */
3373
3374
3375/* Opcode VEX.0F 0x6f - invalid */
3376
3377/**
3378 * @opcode 0x6f
3379 * @oppfx 0x66
3380 * @opcpuid avx
3381 * @opgroup og_avx_simdint_datamove
3382 * @opxcpttype 1
3383 * @optest op1=1 op2=2 -> op1=2
3384 * @optest op1=0 op2=-42 -> op1=-42
3385 */
3386FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3387{
3388 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3389 Assert(pVCpu->iem.s.uVexLength <= 1);
3390 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3391 if (IEM_IS_MODRM_REG_MODE(bRm))
3392 {
3393 /*
3394 * Register, register.
3395 */
3396 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3397 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3398
3399 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3400 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3401 if (pVCpu->iem.s.uVexLength == 0)
3402 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3403 IEM_GET_MODRM_RM(pVCpu, bRm));
3404 else
3405 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3406 IEM_GET_MODRM_RM(pVCpu, bRm));
3407 IEM_MC_ADVANCE_RIP_AND_FINISH();
3408 IEM_MC_END();
3409 }
3410 else if (pVCpu->iem.s.uVexLength == 0)
3411 {
3412 /*
3413 * Register, memory128.
3414 */
3415 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3416 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3418
3419 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3420 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3421 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3422 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3423
3424 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3425 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3426
3427 IEM_MC_ADVANCE_RIP_AND_FINISH();
3428 IEM_MC_END();
3429 }
3430 else
3431 {
3432 /*
3433 * Register, memory256.
3434 */
3435 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3436 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3438
3439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3440 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3441 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3442 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3443
3444 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3445 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3446
3447 IEM_MC_ADVANCE_RIP_AND_FINISH();
3448 IEM_MC_END();
3449 }
3450}
3451
3452/**
3453 * @opcode 0x6f
3454 * @oppfx 0xf3
3455 * @opcpuid avx
3456 * @opgroup og_avx_simdint_datamove
3457 * @opxcpttype 4UA
3458 * @optest op1=1 op2=2 -> op1=2
3459 * @optest op1=0 op2=-42 -> op1=-42
3460 */
3461FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3462{
3463 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3464 Assert(pVCpu->iem.s.uVexLength <= 1);
3465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3466 if (IEM_IS_MODRM_REG_MODE(bRm))
3467 {
3468 /*
3469 * Register, register.
3470 */
3471 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3472 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3473
3474 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3475 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3476 if (pVCpu->iem.s.uVexLength == 0)
3477 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3478 IEM_GET_MODRM_RM(pVCpu, bRm));
3479 else
3480 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3481 IEM_GET_MODRM_RM(pVCpu, bRm));
3482 IEM_MC_ADVANCE_RIP_AND_FINISH();
3483 IEM_MC_END();
3484 }
3485 else if (pVCpu->iem.s.uVexLength == 0)
3486 {
3487 /*
3488 * Register, memory128.
3489 */
3490 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3491 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3493
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3496 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3498
3499 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3500 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3501
3502 IEM_MC_ADVANCE_RIP_AND_FINISH();
3503 IEM_MC_END();
3504 }
3505 else
3506 {
3507 /*
3508 * Register, memory256.
3509 */
3510 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3511 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3513
3514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3515 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3516 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3517 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3518
3519 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3520 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3521
3522 IEM_MC_ADVANCE_RIP_AND_FINISH();
3523 IEM_MC_END();
3524 }
3525}
3526
3527
3528/* Opcode VEX.0F 0x70 - invalid */
3529
3530
3531/**
3532 * Common worker for AVX/AVX2 instructions on the forms:
3533 * - vpxxx xmm0, xmm2/mem128, imm8
3534 * - vpxxx ymm0, ymm2/mem256, imm8
3535 *
3536 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3537 */
3538FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3539{
3540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3541 if (IEM_IS_MODRM_REG_MODE(bRm))
3542 {
3543 /*
3544 * Register, register.
3545 */
3546 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3547 if (pVCpu->iem.s.uVexLength)
3548 {
3549 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3550 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3551 IEM_MC_LOCAL(RTUINT256U, uDst);
3552 IEM_MC_LOCAL(RTUINT256U, uSrc);
3553 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3554 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3555 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3556 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3557 IEM_MC_PREPARE_AVX_USAGE();
3558 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3559 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3560 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3561 IEM_MC_ADVANCE_RIP_AND_FINISH();
3562 IEM_MC_END();
3563 }
3564 else
3565 {
3566 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3567 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3568 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3569 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3570 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3571 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3572 IEM_MC_PREPARE_AVX_USAGE();
3573 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3574 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3575 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3576 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3577 IEM_MC_ADVANCE_RIP_AND_FINISH();
3578 IEM_MC_END();
3579 }
3580 }
3581 else
3582 {
3583 /*
3584 * Register, memory.
3585 */
3586 if (pVCpu->iem.s.uVexLength)
3587 {
3588 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3589 IEM_MC_LOCAL(RTUINT256U, uDst);
3590 IEM_MC_LOCAL(RTUINT256U, uSrc);
3591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3592 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3593 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3594
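            /* Note: the shuffle immediate (Ib) follows the ModR/M bytes, hence the trailing 1 passed to the effective address calculation below. */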
3595 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3596 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3597 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3598 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3599 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3600 IEM_MC_PREPARE_AVX_USAGE();
3601
3602 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3603 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3604 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3605
3606 IEM_MC_ADVANCE_RIP_AND_FINISH();
3607 IEM_MC_END();
3608 }
3609 else
3610 {
3611 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3612 IEM_MC_LOCAL(RTUINT128U, uSrc);
3613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3614 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3615 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3616
3617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3618 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3619 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3620 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3621 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3622 IEM_MC_PREPARE_AVX_USAGE();
3623
3624 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3625 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3626 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3627 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3628
3629 IEM_MC_ADVANCE_RIP_AND_FINISH();
3630 IEM_MC_END();
3631 }
3632 }
3633}
3634
3635
3636/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3637FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3638{
3639 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3640 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3641 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3643}
3644
3645
3646/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3647FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3648{
3649 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3650 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3651 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3653}
3654
3655
3656/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3657FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3658{
3659 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3660 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3661 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3662}
3663
3664
3665/**
3666 * Common worker(s) for AVX/AVX2 instructions on the forms:
3667 * - vpxxx xmm0, xmm2, imm8
3668 * - vpxxx ymm0, ymm2, imm8
3669 *
3670 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3671 */
3672FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
3673{
3674 if (IEM_IS_MODRM_REG_MODE(bRm))
3675 {
3676 /*
3677 * Register, register.
3678 */
3679 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3680 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3681 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3682 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3683 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3684 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3685 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3686 IEM_MC_PREPARE_AVX_USAGE();
3687 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3688 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3689 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3690 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
3691 IEM_MC_ADVANCE_RIP_AND_FINISH();
3692 IEM_MC_END();
3693 }
3694 /* No memory operand. */
3695 else
3696 IEMOP_RAISE_INVALID_OPCODE_RET();
3697}
3698
3699FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3700{
3701 if (IEM_IS_MODRM_REG_MODE(bRm))
3702 {
3703 /*
3704 * Register, register.
3705 */
3706 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3707 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3708 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3709 IEM_MC_LOCAL(RTUINT256U, uDst);
3710 IEM_MC_LOCAL(RTUINT256U, uSrc);
3711 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3712 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3713 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3714 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3715 IEM_MC_PREPARE_AVX_USAGE();
3716 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3717 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3718 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
3719 IEM_MC_ADVANCE_RIP_AND_FINISH();
3720 IEM_MC_END();
3721 }
3722 /* No memory operand. */
3723 else
3724 IEMOP_RAISE_INVALID_OPCODE_RET();
3725}
3726
3727
3728/* Opcode VEX.0F 0x71 11/2 - invalid. */
3729/** Opcode VEX.66.0F 0x71 11/2. */
3730FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
3731{
3732 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3733 if (pVCpu->iem.s.uVexLength)
3734 {
3735 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3736 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
3737 }
3738 else
3739 {
3740 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3741 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
3742 }
3743}
3744
3745
3746/* Opcode VEX.0F 0x71 11/4 - invalid */
3747/** Opcode VEX.66.0F 0x71 11/4. */
3748FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
3749{
3750 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3751 if (pVCpu->iem.s.uVexLength)
3752 {
3753 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3754 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
3755 }
3756 else
3757 {
3758 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3759 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
3760 }
3761}
3762
3763/* Opcode VEX.0F 0x71 11/6 - invalid */
3764
3765/** Opcode VEX.66.0F 0x71 11/6. */
3766FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
3767{
3768 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3769 if (pVCpu->iem.s.uVexLength)
3770 {
3771 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3772 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
3773 }
3774 else
3775 {
3776 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3777 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
3778 }
3779}
3780
3781
3782/**
3783 * VEX Group 12 jump table for register variant.
3784 */
3785IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3786{ /* pfx: none, 066h, 0f3h, 0f2h */
3787 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3788 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3789 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3790 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3791 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3792 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3793 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3794 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3795};
3796AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3797
3798
3799/** Opcode VEX.0F 0x71. */
3800FNIEMOP_DEF(iemOp_VGrp12)
3801{
3802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3803 if (IEM_IS_MODRM_REG_MODE(bRm))
3804 /* register, register */
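        /* Handlers are indexed by ModR/M.reg times four plus the SIMD prefix column (none, 066h, 0f3h, 0f2h). */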
3805 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3806 + pVCpu->iem.s.idxPrefix], bRm);
3807 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3808}
3809
3810
3811/* Opcode VEX.0F 0x72 11/2 - invalid. */
3812/** Opcode VEX.66.0F 0x72 11/2. */
3813FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
3814{
3815 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3816 if (pVCpu->iem.s.uVexLength)
3817 {
3818 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3819 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
3820 }
3821 else
3822 {
3823 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3824 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
3825 }
3826}
3827
3828
3829/* Opcode VEX.0F 0x72 11/4 - invalid. */
3830/** Opcode VEX.66.0F 0x72 11/4. */
3831FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
3832{
3833 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3834 if (pVCpu->iem.s.uVexLength)
3835 {
3836 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3837 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
3838 }
3839 else
3840 {
3841 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3842 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
3843 }
3844}
3845
3846/* Opcode VEX.0F 0x72 11/6 - invalid. */
3847
3848/** Opcode VEX.66.0F 0x72 11/6. */
3849FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
3850{
3851 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3852 if (pVCpu->iem.s.uVexLength)
3853 {
3854 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3855 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
3856 }
3857 else
3858 {
3859 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3860 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
3861 }
3862}
3863
3864
3865/**
3866 * VEX Group 13 jump table for register variant.
3867 */
3868IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3869{ /* pfx: none, 066h, 0f3h, 0f2h */
3870 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3871 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3872 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3873 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3874 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3875 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3876 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3877 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3878};
3879AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3880
3881/** Opcode VEX.0F 0x72. */
3882FNIEMOP_DEF(iemOp_VGrp13)
3883{
3884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3885 if (IEM_IS_MODRM_REG_MODE(bRm))
3886 /* register, register */
3887 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3888 + pVCpu->iem.s.idxPrefix], bRm);
3889 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3890}
3891
3892
3893/* Opcode VEX.0F 0x73 11/2 - invalid. */
3894/** Opcode VEX.66.0F 0x73 11/2. */
3895FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
3896{
3897 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3898 if (pVCpu->iem.s.uVexLength)
3899 {
3900 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3901 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
3902 }
3903 else
3904 {
3905 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3906 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
3907 }
3908}
3909
3910
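/* Opcode VEX.0F 0x73 11/3 - invalid. */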
3911/** Opcode VEX.66.0F 0x73 11/3. */
3912FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
3913{
3914 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3915 if (pVCpu->iem.s.uVexLength)
3916 {
3917 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3918 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
3919 }
3920 else
3921 {
3922 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3923 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
3924 }
3925}
3926
3927/* Opcode VEX.0F 0x73 11/6 - invalid. */
3928
3929/** Opcode VEX.66.0F 0x73 11/6. */
3930FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
3931{
3932 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3933 if (pVCpu->iem.s.uVexLength)
3934 {
3935 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3936 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
3937 }
3938 else
3939 {
3940 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3941 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
3942 }
3943}
3944
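/* Opcode VEX.0F 0x73 11/7 - invalid. */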
3945/** Opcode VEX.66.0F 0x73 11/7. */
3946FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
3947{
3948 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3949 if (pVCpu->iem.s.uVexLength)
3950 {
3951 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3952 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
3953 }
3954 else
3955 {
3956 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3957 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
3958 }
3959}
3960
3962
3963/**
3964 * VEX Group 14 jump table for register variant.
3965 */
3966IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3967{ /* pfx: none, 066h, 0f3h, 0f2h */
3968 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3969 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3970 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3971 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3972 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3973 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3974 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3975 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3976};
3977AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3978
3979
3980/** Opcode VEX.0F 0x73. */
3981FNIEMOP_DEF(iemOp_VGrp14)
3982{
3983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3984 if (IEM_IS_MODRM_REG_MODE(bRm))
3985 /* register, register */
3986 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3987 + pVCpu->iem.s.idxPrefix], bRm);
3988 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3989}
3990
3991
3992/* Opcode VEX.0F 0x74 - invalid */
3993
3994
3995/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3996FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3997{
3998 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3999 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
4000 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4001}
4002
4003/* Opcode VEX.F3.0F 0x74 - invalid */
4004/* Opcode VEX.F2.0F 0x74 - invalid */
4005
4006
4007/* Opcode VEX.0F 0x75 - invalid */
4008
4009
4010/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
4011FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
4012{
4013 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4014 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
4015 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4016}
4017
4018
4019/* Opcode VEX.F3.0F 0x75 - invalid */
4020/* Opcode VEX.F2.0F 0x75 - invalid */
4021
4022
4023/* Opcode VEX.0F 0x76 - invalid */
4024
4025
4026/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
4027FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
4028{
4029 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4030 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
4031 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4032}
4033
4034
4035/* Opcode VEX.F3.0F 0x76 - invalid */
4036/* Opcode VEX.F2.0F 0x76 - invalid */
4037
4038
4039/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
4040FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
4041{
4042 Assert(pVCpu->iem.s.uVexLength <= 1);
4043 if (pVCpu->iem.s.uVexLength == 0)
4044 {
4045 /*
4046 * 128-bit: vzeroupper
4047 */
4048 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
4049 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4050
4051 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4052 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4053 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4054
4055 IEM_MC_CLEAR_YREG_128_UP(0);
4056 IEM_MC_CLEAR_YREG_128_UP(1);
4057 IEM_MC_CLEAR_YREG_128_UP(2);
4058 IEM_MC_CLEAR_YREG_128_UP(3);
4059 IEM_MC_CLEAR_YREG_128_UP(4);
4060 IEM_MC_CLEAR_YREG_128_UP(5);
4061 IEM_MC_CLEAR_YREG_128_UP(6);
4062 IEM_MC_CLEAR_YREG_128_UP(7);
4063
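        /* YMM8 thru YMM15 are only accessible in 64-bit mode. */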
4064 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4065 {
4066 IEM_MC_CLEAR_YREG_128_UP( 8);
4067 IEM_MC_CLEAR_YREG_128_UP( 9);
4068 IEM_MC_CLEAR_YREG_128_UP(10);
4069 IEM_MC_CLEAR_YREG_128_UP(11);
4070 IEM_MC_CLEAR_YREG_128_UP(12);
4071 IEM_MC_CLEAR_YREG_128_UP(13);
4072 IEM_MC_CLEAR_YREG_128_UP(14);
4073 IEM_MC_CLEAR_YREG_128_UP(15);
4074 }
4075
4076 IEM_MC_ADVANCE_RIP_AND_FINISH();
4077 IEM_MC_END();
4078 }
4079 else
4080 {
4081 /*
4082 * 256-bit: vzeroall
4083 */
4084 IEMOP_MNEMONIC(vzeroall, "vzeroall");
4085 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4086
4087 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4088 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4089 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4090
4091 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
4092 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
4093 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
4094 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
4095 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
4096 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4097 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4098 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4099 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4100
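        /* YMM8 thru YMM15 are only accessible in 64-bit mode. */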
4101 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4102 {
4103 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4104 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
4105 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
4106 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
4107 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
4108 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
4109 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
4110 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
4111 }
4112
4113 IEM_MC_ADVANCE_RIP_AND_FINISH();
4114 IEM_MC_END();
4115 }
4116}
4117
4118
4119/* Opcode VEX.66.0F 0x77 - invalid */
4120/* Opcode VEX.F3.0F 0x77 - invalid */
4121/* Opcode VEX.F2.0F 0x77 - invalid */
4122
4123/* Opcode VEX.0F 0x78 - invalid */
4124/* Opcode VEX.66.0F 0x78 - invalid */
4125/* Opcode VEX.F3.0F 0x78 - invalid */
4126/* Opcode VEX.F2.0F 0x78 - invalid */
4127
4128/* Opcode VEX.0F 0x79 - invalid */
4129/* Opcode VEX.66.0F 0x79 - invalid */
4130/* Opcode VEX.F3.0F 0x79 - invalid */
4131/* Opcode VEX.F2.0F 0x79 - invalid */
4132
4133/* Opcode VEX.0F 0x7a - invalid */
4134/* Opcode VEX.66.0F 0x7a - invalid */
4135/* Opcode VEX.F3.0F 0x7a - invalid */
4136/* Opcode VEX.F2.0F 0x7a - invalid */
4137
4138/* Opcode VEX.0F 0x7b - invalid */
4139/* Opcode VEX.66.0F 0x7b - invalid */
4140/* Opcode VEX.F3.0F 0x7b - invalid */
4141/* Opcode VEX.F2.0F 0x7b - invalid */
4142
4143/* Opcode VEX.0F 0x7c - invalid */
4144/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4145FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
4146/* Opcode VEX.F3.0F 0x7c - invalid */
4147
4148
4149/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4150FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
4151{
4152 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4153 IEMOPMEDIAF3_INIT_VARS( vhaddps);
4154 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
4155}
4156
4157
4158/* Opcode VEX.0F 0x7d - invalid */
4159/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4160FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
4161/* Opcode VEX.F3.0F 0x7d - invalid */
4162/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4163FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
4164
4165
4166/* Opcode VEX.0F 0x7e - invalid */
4167
4168FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4169{
4170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4171 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4172 {
4173 /**
4174 * @opcode 0x7e
4175 * @opcodesub rex.w=1
4176 * @oppfx 0x66
4177 * @opcpuid avx
4178 * @opgroup og_avx_simdint_datamov
4179 * @opxcpttype 5
4180 * @optest 64-bit / op1=1 op2=2 -> op1=2
4181 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4182 */
4183 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4184 if (IEM_IS_MODRM_REG_MODE(bRm))
4185 {
4186 /* greg64, XMM */
4187 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4188 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4189 IEM_MC_LOCAL(uint64_t, u64Tmp);
4190
4191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4193
4194 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4195 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4196
4197 IEM_MC_ADVANCE_RIP_AND_FINISH();
4198 IEM_MC_END();
4199 }
4200 else
4201 {
4202 /* [mem64], XMM */
4203 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4205 IEM_MC_LOCAL(uint64_t, u64Tmp);
4206
4207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4208 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4209 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4210 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4211
4212 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4213 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4214
4215 IEM_MC_ADVANCE_RIP_AND_FINISH();
4216 IEM_MC_END();
4217 }
4218 }
4219 else
4220 {
4221 /**
4222 * @opdone
4223 * @opcode 0x7e
4224 * @opcodesub rex.w=0
4225 * @oppfx 0x66
4226 * @opcpuid avx
4227 * @opgroup og_avx_simdint_datamov
4228 * @opxcpttype 5
4229 * @opfunction iemOp_vmovd_q_Ey_Vy
4230 * @optest op1=1 op2=2 -> op1=2
4231 * @optest op1=0 op2=-42 -> op1=-42
4232 */
4233 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4234 if (IEM_IS_MODRM_REG_MODE(bRm))
4235 {
4236 /* greg32, XMM */
4237 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4238 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4239 IEM_MC_LOCAL(uint32_t, u32Tmp);
4240
4241 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4242 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4243
4244 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4245 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4246
4247 IEM_MC_ADVANCE_RIP_AND_FINISH();
4248 IEM_MC_END();
4249 }
4250 else
4251 {
4252 /* [mem32], XMM */
4253 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4255 IEM_MC_LOCAL(uint32_t, u32Tmp);
4256
4257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4258 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4260 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4261
4262 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4263 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4264
4265 IEM_MC_ADVANCE_RIP_AND_FINISH();
4266 IEM_MC_END();
4267 }
4268 }
4269}
4270
4271
4272/**
4273 * @opcode 0x7e
4274 * @oppfx 0xf3
4275 * @opcpuid avx
4276 * @opgroup og_avx_pcksclr_datamove
4277 * @opxcpttype none
4278 * @optest op1=1 op2=2 -> op1=2
4279 * @optest op1=0 op2=-42 -> op1=-42
4280 */
4281FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4282{
4283 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4284 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4285 if (IEM_IS_MODRM_REG_MODE(bRm))
4286 {
4287 /*
4288 * Register, register.
4289 */
4290 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4291 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4292
4293 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4294 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4295
4296 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4297 IEM_GET_MODRM_RM(pVCpu, bRm));
4298 IEM_MC_ADVANCE_RIP_AND_FINISH();
4299 IEM_MC_END();
4300 }
4301 else
4302 {
4303 /*
4304 * Register, memory.
4305 */
4306 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4307 IEM_MC_LOCAL(uint64_t, uSrc);
4308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4309
4310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4311 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4312 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4313 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4314
4315 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4316 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4317
4318 IEM_MC_ADVANCE_RIP_AND_FINISH();
4319 IEM_MC_END();
4320 }
4322}

4323/* Opcode VEX.F2.0F 0x7e - invalid */
4324
4325
4326/* Opcode VEX.0F 0x7f - invalid */
4327
4328/**
4329 * @opcode 0x7f
4330 * @oppfx 0x66
4331 * @opcpuid avx
4332 * @opgroup og_avx_simdint_datamove
4333 * @opxcpttype 1
4334 * @optest op1=1 op2=2 -> op1=2
4335 * @optest op1=0 op2=-42 -> op1=-42
4336 */
4337FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4338{
4339 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4340 Assert(pVCpu->iem.s.uVexLength <= 1);
4341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4342 if (IEM_IS_MODRM_REG_MODE(bRm))
4343 {
4344 /*
4345 * Register, register.
4346 */
4347 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4348 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4349
4350 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4351 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4352 if (pVCpu->iem.s.uVexLength == 0)
4353 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4354 IEM_GET_MODRM_REG(pVCpu, bRm));
4355 else
4356 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4357 IEM_GET_MODRM_REG(pVCpu, bRm));
4358 IEM_MC_ADVANCE_RIP_AND_FINISH();
4359 IEM_MC_END();
4360 }
4361 else if (pVCpu->iem.s.uVexLength == 0)
4362 {
4363 /*
4364 * Register, memory128.
4365 */
4366 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4367 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4369
4370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4371 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4372 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4373 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4374
4375 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4376 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4377
4378 IEM_MC_ADVANCE_RIP_AND_FINISH();
4379 IEM_MC_END();
4380 }
4381 else
4382 {
4383 /*
4384 * Register, memory256.
4385 */
4386 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4387 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4389
4390 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4391 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4392 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4393 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4394
4395 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4396 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4397
4398 IEM_MC_ADVANCE_RIP_AND_FINISH();
4399 IEM_MC_END();
4400 }
4401}
4402
4403
4404/**
4405 * @opcode 0x7f
4406 * @oppfx 0xf3
4407 * @opcpuid avx
4408 * @opgroup og_avx_simdint_datamove
4409 * @opxcpttype 4UA
4410 * @optest op1=1 op2=2 -> op1=2
4411 * @optest op1=0 op2=-42 -> op1=-42
4412 */
4413FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4414{
4415 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4416 Assert(pVCpu->iem.s.uVexLength <= 1);
4417 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4418 if (IEM_IS_MODRM_REG_MODE(bRm))
4419 {
4420 /*
4421 * Register, register.
4422 */
4423 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4424 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4425
4426 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4427 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4428 if (pVCpu->iem.s.uVexLength == 0)
4429 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4430 IEM_GET_MODRM_REG(pVCpu, bRm));
4431 else
4432 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4433 IEM_GET_MODRM_REG(pVCpu, bRm));
4434 IEM_MC_ADVANCE_RIP_AND_FINISH();
4435 IEM_MC_END();
4436 }
4437 else if (pVCpu->iem.s.uVexLength == 0)
4438 {
4439 /*
4440 * Register, memory128.
4441 */
4442 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4443 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4445
4446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4447 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4448 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4449 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4450
4451 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4452 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4453
4454 IEM_MC_ADVANCE_RIP_AND_FINISH();
4455 IEM_MC_END();
4456 }
4457 else
4458 {
4459 /*
4460 * Register, memory256.
4461 */
4462 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4463 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4464 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4465
4466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4467 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4468 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4469 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4470
4471 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4472 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4473
4474 IEM_MC_ADVANCE_RIP_AND_FINISH();
4475 IEM_MC_END();
4476 }
4477}
4478
4479/* Opcode VEX.F2.0F 0x7f - invalid */
4480
4481
4482/* Opcode VEX.0F 0x80 - invalid */
4483/* Opcode VEX.0F 0x81 - invalid */
4484/* Opcode VEX.0F 0x82 - invalid */
4485/* Opcode VEX.0F 0x83 - invalid */
4486/* Opcode VEX.0F 0x84 - invalid */
4487/* Opcode VEX.0F 0x85 - invalid */
4488/* Opcode VEX.0F 0x86 - invalid */
4489/* Opcode VEX.0F 0x87 - invalid */
4490/* Opcode VEX.0F 0x88 - invalid */
4491/* Opcode VEX.0F 0x89 - invalid */
4492/* Opcode VEX.0F 0x8a - invalid */
4493/* Opcode VEX.0F 0x8b - invalid */
4494/* Opcode VEX.0F 0x8c - invalid */
4495/* Opcode VEX.0F 0x8d - invalid */
4496/* Opcode VEX.0F 0x8e - invalid */
4497/* Opcode VEX.0F 0x8f - invalid */
4498/* Opcode VEX.0F 0x90 - invalid */
4499/* Opcode VEX.0F 0x91 - invalid */
4500/* Opcode VEX.0F 0x92 - invalid */
4501/* Opcode VEX.0F 0x93 - invalid */
4502/* Opcode VEX.0F 0x94 - invalid */
4503/* Opcode VEX.0F 0x95 - invalid */
4504/* Opcode VEX.0F 0x96 - invalid */
4505/* Opcode VEX.0F 0x97 - invalid */
4506/* Opcode VEX.0F 0x98 - invalid */
4507/* Opcode VEX.0F 0x99 - invalid */
4508/* Opcode VEX.0F 0x9a - invalid */
4509/* Opcode VEX.0F 0x9b - invalid */
4510/* Opcode VEX.0F 0x9c - invalid */
4511/* Opcode VEX.0F 0x9d - invalid */
4512/* Opcode VEX.0F 0x9e - invalid */
4513/* Opcode VEX.0F 0x9f - invalid */
4514/* Opcode VEX.0F 0xa0 - invalid */
4515/* Opcode VEX.0F 0xa1 - invalid */
4516/* Opcode VEX.0F 0xa2 - invalid */
4517/* Opcode VEX.0F 0xa3 - invalid */
4518/* Opcode VEX.0F 0xa4 - invalid */
4519/* Opcode VEX.0F 0xa5 - invalid */
4520/* Opcode VEX.0F 0xa6 - invalid */
4521/* Opcode VEX.0F 0xa7 - invalid */
4522/* Opcode VEX.0F 0xa8 - invalid */
4523/* Opcode VEX.0F 0xa9 - invalid */
4524/* Opcode VEX.0F 0xaa - invalid */
4525/* Opcode VEX.0F 0xab - invalid */
4526/* Opcode VEX.0F 0xac - invalid */
4527/* Opcode VEX.0F 0xad - invalid */
4528
4529
4530/* Opcode VEX.0F 0xae mem/0 - invalid. */
4531/* Opcode VEX.0F 0xae mem/1 - invalid. */
4532
4533/**
4534 * @ opmaps grp15
4535 * @ opcode !11/2
4536 * @ oppfx none
4537 * @ opcpuid sse
4538 * @ opgroup og_sse_mxcsrsm
4539 * @ opxcpttype 5
4540 * @ optest op1=0 -> mxcsr=0
4541 * @ optest op1=0x2083 -> mxcsr=0x2083
4542 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4543 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4544 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4545 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4546 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4547 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4548 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4549 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4550 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4551 */
4552FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4553//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4554//{
4555// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4556// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4557// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4558// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4559// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4560// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4561// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4562// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4563// IEM_MC_END();
4564// return VINF_SUCCESS;
4565//}
4566
4567
4568/**
4569 * @opmaps vexgrp15
4570 * @opcode !11/3
4571 * @oppfx none
4572 * @opcpuid avx
4573 * @opgroup og_avx_mxcsrsm
4574 * @opxcpttype 5
4575 * @optest mxcsr=0 -> op1=0
4576 * @optest mxcsr=0x2083 -> op1=0x2083
4577 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4578 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4579 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4580 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4581 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4582 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4583 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4584 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4585 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4586 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4587 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4588 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4589 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4590 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4591 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4592 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4593 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4594 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4595 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4596 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4597 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4598 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4599 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4600 * -> value.xcpt=0x6
4601 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4602 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4603 * APMv4 rev 3.17 page 509.
4604 * @todo Test this instruction on AMD Ryzen.
4605 */
4606FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4607{
4608 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4609 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4610 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4612 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
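    /* MXCSR is part of the SSE state, so a read-only actualization suffices before storing it to memory. */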
4613 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4614 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4615 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4616 IEM_MC_END();
4617}
4618
4619/* Opcode VEX.0F 0xae mem/4 - invalid. */
4620/* Opcode VEX.0F 0xae mem/5 - invalid. */
4621/* Opcode VEX.0F 0xae mem/6 - invalid. */
4622/* Opcode VEX.0F 0xae mem/7 - invalid. */
4623
4624/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4625/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4626/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4627/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4628/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4629/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4630/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4631/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4632
4633/**
4634 * Vex group 15 jump table for memory variant.
4635 */
4636IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4637{ /* pfx: none, 066h, 0f3h, 0f2h */
4638 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4639 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4640 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4641 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4642 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4643 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4644 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4645 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4646};
4647AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4648
4649
4650/** Opcode VEX.0F 0xae. */
4651FNIEMOP_DEF(iemOp_VGrp15)
4652{
4653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4654 if (IEM_IS_MODRM_REG_MODE(bRm))
4655 /* register, register */
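        /* All register (11b) encodings of VEX group 15 are invalid; only the memory forms are decoded. */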
4656 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4657
4658 /* memory, register */
4659 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4660 + pVCpu->iem.s.idxPrefix], bRm);
4661}
4662
4663
4664/* Opcode VEX.0F 0xaf - invalid. */
4665
4666/* Opcode VEX.0F 0xb0 - invalid. */
4667/* Opcode VEX.0F 0xb1 - invalid. */
4668/* Opcode VEX.0F 0xb2 - invalid. */
4670/* Opcode VEX.0F 0xb3 - invalid. */
4671/* Opcode VEX.0F 0xb4 - invalid. */
4672/* Opcode VEX.0F 0xb5 - invalid. */
4673/* Opcode VEX.0F 0xb6 - invalid. */
4674/* Opcode VEX.0F 0xb7 - invalid. */
4675/* Opcode VEX.0F 0xb8 - invalid. */
4676/* Opcode VEX.0F 0xb9 - invalid. */
4677/* Opcode VEX.0F 0xba - invalid. */
4678/* Opcode VEX.0F 0xbb - invalid. */
4679/* Opcode VEX.0F 0xbc - invalid. */
4680/* Opcode VEX.0F 0xbd - invalid. */
4681/* Opcode VEX.0F 0xbe - invalid. */
4682/* Opcode VEX.0F 0xbf - invalid. */
4683
4684/* Opcode VEX.0F 0xc0 - invalid. */
4685/* Opcode VEX.66.0F 0xc0 - invalid. */
4686/* Opcode VEX.F3.0F 0xc0 - invalid. */
4687/* Opcode VEX.F2.0F 0xc0 - invalid. */
4688
4689/* Opcode VEX.0F 0xc1 - invalid. */
4690/* Opcode VEX.66.0F 0xc1 - invalid. */
4691/* Opcode VEX.F3.0F 0xc1 - invalid. */
4692/* Opcode VEX.F2.0F 0xc1 - invalid. */
4693
4694/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4695FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4696/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4697FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4698/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4699FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4700/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4701FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4702
4703/* Opcode VEX.0F 0xc3 - invalid */
4704/* Opcode VEX.66.0F 0xc3 - invalid */
4705/* Opcode VEX.F3.0F 0xc3 - invalid */
4706/* Opcode VEX.F2.0F 0xc3 - invalid */
4707
4708/* Opcode VEX.0F 0xc4 - invalid */
4709
4710
4711/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4712FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4713{
4714 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4716 if (IEM_IS_MODRM_REG_MODE(bRm))
4717 {
4718 /*
4719 * Register, register.
4720 */
4721 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4722 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4723 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4724 IEM_MC_LOCAL(uint16_t, uValue);
4725
4726 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4727 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4728 IEM_MC_PREPARE_AVX_USAGE();
4729
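        /* Copy the VVVV source register to the destination, then overwrite the word selected by imm8[2:0]. */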
4730 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4731 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
4732 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4733 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4734 IEM_MC_ADVANCE_RIP_AND_FINISH();
4735 IEM_MC_END();
4736 }
4737 else
4738 {
4739 /*
4740 * Register, memory.
4741 */
4742 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4744 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4745 IEM_MC_LOCAL(uint16_t, uValue);
4746
4747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4748 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4749 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4750 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4751 IEM_MC_PREPARE_AVX_USAGE();
4752
4753 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4754 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4755 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4756 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4757 IEM_MC_ADVANCE_RIP_AND_FINISH();
4758 IEM_MC_END();
4759 }
4760}
4761
4762
4763/* Opcode VEX.F3.0F 0xc4 - invalid */
4764/* Opcode VEX.F2.0F 0xc4 - invalid */
4765
4766/* Opcode VEX.0F 0xc5 - invalid */
4767
4768
4769/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4770FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4771{
4772 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4774 if (IEM_IS_MODRM_REG_MODE(bRm))
4775 {
4776 /*
4777 * greg32, XMM, imm8.
4778 */
4779 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4780 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4781 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4782 IEM_MC_LOCAL(uint16_t, uValue);
4783 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4784 IEM_MC_PREPARE_AVX_USAGE();
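        /* Fetch the word selected by imm8[2:0] and zero extend it into the destination GPR. */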
4785 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
4786 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
4787 IEM_MC_ADVANCE_RIP_AND_FINISH();
4788 IEM_MC_END();
4789 }
4790 /* No memory operand. */
4791 else
4792 IEMOP_RAISE_INVALID_OPCODE_RET();
4793}
4794
4795
4796/* Opcode VEX.F3.0F 0xc5 - invalid */
4797/* Opcode VEX.F2.0F 0xc5 - invalid */
4798
4799
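/**
 * Common body for vshufps (VEX.0F 0xc6) and vshufpd (VEX.66.0F 0xc6),
 * expanded with the instruction name so the matching iemAImpl_xxx_u128/u256
 * workers and their fallbacks are selected.
 */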
4800#define VSHUFP_X(a_Instr) \
4801 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4802 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4803 { \
4804 /* \
4805 * Register, register. \
4806 */ \
4807 if (pVCpu->iem.s.uVexLength) \
4808 { \
4809 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4810 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4811 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4812 IEM_MC_LOCAL(RTUINT256U, uDst); \
4813 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4814 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4815 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4816 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4817 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4818 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4819 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4820 IEM_MC_PREPARE_AVX_USAGE(); \
4821 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4822 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4823 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4824 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4825 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4826 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4827 IEM_MC_END(); \
4828 } \
4829 else \
4830 { \
4831 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4832 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4833 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4834 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4835 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4836 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4837 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4838 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4839 IEM_MC_PREPARE_AVX_USAGE(); \
4840 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4841 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4842 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4843 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4844 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4845 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4846 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4847 IEM_MC_END(); \
4848 } \
4849 } \
4850 else \
4851 { \
4852 /* \
4853 * Register, memory. \
4854 */ \
4855 if (pVCpu->iem.s.uVexLength) \
4856 { \
4857 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4858 IEM_MC_LOCAL(RTUINT256U, uDst); \
4859 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4860 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4862 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4863 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4864 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4866 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4867 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4868 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4869 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4870 IEM_MC_PREPARE_AVX_USAGE(); \
4871 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4872 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4873 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4874 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4875 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4876 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4877 IEM_MC_END(); \
4878 } \
4879 else \
4880 { \
4881 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4882 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4884 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4885 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4886 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4888 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4889 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4890 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4891 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4892 IEM_MC_PREPARE_AVX_USAGE(); \
4893 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4894 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4895 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4896 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4897 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4898 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4899 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4900 IEM_MC_END(); \
4901 } \
4902 } \
4903 (void)0
4904
4905/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4906FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4907{
4908 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4909 VSHUFP_X(vshufps);
4910}
4911
4912
4913/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4914FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4915{
4916 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4917 VSHUFP_X(vshufpd);
4918}
4919#undef VSHUFP_X
4920
4921
4922/* Opcode VEX.F3.0F 0xc6 - invalid */
4923/* Opcode VEX.F2.0F 0xc6 - invalid */
4924
4925/* Opcode VEX.0F 0xc7 - invalid */
4926/* Opcode VEX.66.0F 0xc7 - invalid */
4927/* Opcode VEX.F3.0F 0xc7 - invalid */
4928/* Opcode VEX.F2.0F 0xc7 - invalid */
4929
4930/* Opcode VEX.0F 0xc8 - invalid */
4931/* Opcode VEX.0F 0xc9 - invalid */
4932/* Opcode VEX.0F 0xca - invalid */
4933/* Opcode VEX.0F 0xcb - invalid */
4934/* Opcode VEX.0F 0xcc - invalid */
4935/* Opcode VEX.0F 0xcd - invalid */
4936/* Opcode VEX.0F 0xce - invalid */
4937/* Opcode VEX.0F 0xcf - invalid */
4938
4939
4940/* Opcode VEX.0F 0xd0 - invalid */
4941/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4942FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4943/* Opcode VEX.F3.0F 0xd0 - invalid */
4944/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4945FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4946
4947/* Opcode VEX.0F 0xd1 - invalid */
4948/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
4949FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
4950{
4951 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4952 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
4953 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4954}
4955
4956/* Opcode VEX.F3.0F 0xd1 - invalid */
4957/* Opcode VEX.F2.0F 0xd1 - invalid */
4958
4959/* Opcode VEX.0F 0xd2 - invalid */
4960/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4961FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
4962{
4963 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4964 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
4965 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4966}
4967
4968/* Opcode VEX.F3.0F 0xd2 - invalid */
4969/* Opcode VEX.F2.0F 0xd2 - invalid */
4970
4971/* Opcode VEX.0F 0xd3 - invalid */
4972/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4973FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
4974{
4975 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4976 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
4977 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4978}
4979
4980/* Opcode VEX.F3.0F 0xd3 - invalid */
4981/* Opcode VEX.F2.0F 0xd3 - invalid */
4982
4983/* Opcode VEX.0F 0xd4 - invalid */
4984
4985
4986/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4987FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4988{
4989 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4990 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
4991 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4992}
4993
4994
4995/* Opcode VEX.F3.0F 0xd4 - invalid */
4996/* Opcode VEX.F2.0F 0xd4 - invalid */
4997
4998/* Opcode VEX.0F 0xd5 - invalid */
4999
5000
5001/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
5002FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
5003{
5004 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5005 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
5006 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5007}
5008
5009
5010/* Opcode VEX.F3.0F 0xd5 - invalid */
5011/* Opcode VEX.F2.0F 0xd5 - invalid */
5012
5013/* Opcode VEX.0F 0xd6 - invalid */
5014
5015/**
5016 * @opcode 0xd6
5017 * @oppfx 0x66
5018 * @opcpuid avx
5019 * @opgroup og_avx_pcksclr_datamove
5020 * @opxcpttype none
5021 * @optest op1=-1 op2=2 -> op1=2
5022 * @optest op1=0 op2=-42 -> op1=-42
5023 */
5024FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
5025{
5026 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5027 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5028 if (IEM_IS_MODRM_REG_MODE(bRm))
5029 {
5030 /*
5031 * Register, register.
5032 */
5033 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5034 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5035
5036 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5037 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5038
5039 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5040 IEM_GET_MODRM_REG(pVCpu, bRm));
5041 IEM_MC_ADVANCE_RIP_AND_FINISH();
5042 IEM_MC_END();
5043 }
5044 else
5045 {
5046 /*
5047 * Memory, register.
5048 */
5049 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5050 IEM_MC_LOCAL(uint64_t, uSrc);
5051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5052
5053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5054 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5055 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5056 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5057
5058 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5059 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5060
5061 IEM_MC_ADVANCE_RIP_AND_FINISH();
5062 IEM_MC_END();
5063 }
5064}
5065
5066/* Opcode VEX.F3.0F 0xd6 - invalid */
5067/* Opcode VEX.F2.0F 0xd6 - invalid */
5068
5069
5070/* Opcode VEX.0F 0xd7 - invalid */
5071
5072/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
5073FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
5074{
5075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5076 /* Docs say register only. */
5077 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
5078 {
5079 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
5080 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
5081 if (pVCpu->iem.s.uVexLength)
5082 {
5083 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5084 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
5085 IEM_MC_ARG(uint64_t *, puDst, 0);
5086 IEM_MC_LOCAL(RTUINT256U, uSrc);
5087 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
5088 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5089 IEM_MC_PREPARE_AVX_USAGE();
5090 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5091 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5092 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
5093 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
5094 IEM_MC_ADVANCE_RIP_AND_FINISH();
5095 IEM_MC_END();
5096 }
5097 else
5098 {
5099 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5100 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5101 IEM_MC_ARG(uint64_t *, puDst, 0);
5102 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5103 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5104 IEM_MC_PREPARE_AVX_USAGE();
5105 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5106 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5107 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
5108 IEM_MC_ADVANCE_RIP_AND_FINISH();
5109 IEM_MC_END();
5110 }
5111 }
5112 else
5113 IEMOP_RAISE_INVALID_OPCODE_RET();
5114}
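
/*
 * Illustrative note: the byte-mask extraction done by the vpmovmskb workers
 * collects the most significant bit of each source byte into the destination
 * GPR and zeroes the remaining bits.  A scalar sketch of the 256-bit case
 * (reference only, not the actual iemAImpl worker; the name is hypothetical):
 */
#if 0
static void iemRefVpMovMskBU256(uint64_t *puDst, PCRTUINT256U puSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 32; iByte++)
        fMask |= (uint64_t)(puSrc->au8[iByte] >> 7) << iByte; /* bit i = MSB of byte i */
    *puDst = fMask; /* bits 32 thru 63 are left zero */
}
#endif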
5115
5116
5117/* Opcode VEX.F3.0F 0xd7 - invalid */
5118/* Opcode VEX.F2.0F 0xd7 - invalid */
5119
5120
5121/* Opcode VEX.0F 0xd8 - invalid */
5122
5123/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5124FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5125{
5126 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5127 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5128 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5129}
5130
5131
5132/* Opcode VEX.F3.0F 0xd8 - invalid */
5133/* Opcode VEX.F2.0F 0xd8 - invalid */
5134
5135/* Opcode VEX.0F 0xd9 - invalid */
5136
5137
5138/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5139FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5140{
5141 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5142 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5143 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5144}
5145
5146
5147/* Opcode VEX.F3.0F 0xd9 - invalid */
5148/* Opcode VEX.F2.0F 0xd9 - invalid */
5149
5150/* Opcode VEX.0F 0xda - invalid */
5151
5152
5153/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
5154FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
5155{
5156 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5157 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
5158 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5159}
5160
5161
5162/* Opcode VEX.F3.0F 0xda - invalid */
5163/* Opcode VEX.F2.0F 0xda - invalid */
5164
5165/* Opcode VEX.0F 0xdb - invalid */
5166
5167
5168/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
5169FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
5170{
5171 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5172 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5173 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
5174}
5175
5176
5177/* Opcode VEX.F3.0F 0xdb - invalid */
5178/* Opcode VEX.F2.0F 0xdb - invalid */
5179
5180/* Opcode VEX.0F 0xdc - invalid */
5181
5182
5183/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
5184FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
5185{
5186 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5187 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
5188 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5189}
5190
5191
5192/* Opcode VEX.F3.0F 0xdc - invalid */
5193/* Opcode VEX.F2.0F 0xdc - invalid */
5194
5195/* Opcode VEX.0F 0xdd - invalid */
5196
5197
5198/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
5199FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
5200{
5201 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5202 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
5203 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5204}
5205
5206
5207/* Opcode VEX.F3.0F 0xdd - invalid */
5208/* Opcode VEX.F2.0F 0xdd - invalid */
5209
5210/* Opcode VEX.0F 0xde - invalid */
5211
5212
5213/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
5214FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
5215{
5216 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5217 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
5218 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5219}
5220
5221
5222/* Opcode VEX.F3.0F 0xde - invalid */
5223/* Opcode VEX.F2.0F 0xde - invalid */
5224
5225/* Opcode VEX.0F 0xdf - invalid */
5226
5227
5228/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5229FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5230{
5231 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5232 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5233 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5234}
5235
5236
5237/* Opcode VEX.F3.0F 0xdf - invalid */
5238/* Opcode VEX.F2.0F 0xdf - invalid */
5239
5240/* Opcode VEX.0F 0xe0 - invalid */
5241
5242
5243/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
5244FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
5245{
5246 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5247 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
5248 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5249}
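
/*
 * Illustrative note: VPAVGB produces the rounded unsigned average of each byte
 * pair, i.e. (a + b + 1) / 2.  A scalar sketch of the per-byte operation
 * (reference only; the helper name is hypothetical):
 */
#if 0
static uint8_t iemRefAvgU8(uint8_t uSrc1, uint8_t uSrc2)
{
    return (uint8_t)(((uint16_t)uSrc1 + uSrc2 + 1) >> 1); /* widen first so the carry isn't lost */
}
#endif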
5250
5251
5252/* Opcode VEX.F3.0F 0xe0 - invalid */
5253/* Opcode VEX.F2.0F 0xe0 - invalid */
5254
5255/* Opcode VEX.0F 0xe1 - invalid */
5256/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
5257FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
5258{
5259 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5260 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
5261 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5262}
5263
5264/* Opcode VEX.F3.0F 0xe1 - invalid */
5265/* Opcode VEX.F2.0F 0xe1 - invalid */
5266
5267/* Opcode VEX.0F 0xe2 - invalid */
5268/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
5269FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
5270{
5271 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5272 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
5273 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5274}
5275
5276/* Opcode VEX.F3.0F 0xe2 - invalid */
5277/* Opcode VEX.F2.0F 0xe2 - invalid */
5278
5279/* Opcode VEX.0F 0xe3 - invalid */
5280
5281
5282/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
5283FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
5284{
5285 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5286 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
5287 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5288}
5289
5290
5291/* Opcode VEX.F3.0F 0xe3 - invalid */
5292/* Opcode VEX.F2.0F 0xe3 - invalid */
5293
5294/* Opcode VEX.0F 0xe4 - invalid */
5295
5296
5297/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
5298FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
5299{
5300 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5301 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
5302 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5303}
5304
5305
5306/* Opcode VEX.F3.0F 0xe4 - invalid */
5307/* Opcode VEX.F2.0F 0xe4 - invalid */
5308
5309/* Opcode VEX.0F 0xe5 - invalid */
5310
5311
5312/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
5313FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
5314{
5315 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5316 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
5317 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5318}
5319
5320
5321/* Opcode VEX.F3.0F 0xe5 - invalid */
5322/* Opcode VEX.F2.0F 0xe5 - invalid */
5323
5324/* Opcode VEX.0F 0xe6 - invalid */
5325/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5326FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5327/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5328FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5329/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5330FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5331
5332
5333/* Opcode VEX.0F 0xe7 - invalid */
5334
5335/**
5336 * @opcode 0xe7
5337 * @opcodesub !11 mr/reg
5338 * @oppfx 0x66
5339 * @opcpuid avx
5340 * @opgroup og_avx_cachect
5341 * @opxcpttype 1
5342 * @optest op1=-1 op2=2 -> op1=2
5343 * @optest op1=0 op2=-42 -> op1=-42
5344 */
5345FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5346{
5347 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5348 Assert(pVCpu->iem.s.uVexLength <= 1);
5349 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5350 if (IEM_IS_MODRM_MEM_MODE(bRm))
5351 {
5352 if (pVCpu->iem.s.uVexLength == 0)
5353 {
5354 /*
5355 * 128-bit: Memory, register.
5356 */
5357 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5358 IEM_MC_LOCAL(RTUINT128U, uSrc);
5359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5360
5361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5362 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5363 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5364 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5365
5366 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5367 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5368
5369 IEM_MC_ADVANCE_RIP_AND_FINISH();
5370 IEM_MC_END();
5371 }
5372 else
5373 {
5374 /*
5375 * 256-bit: Memory, register.
5376 */
5377 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5378 IEM_MC_LOCAL(RTUINT256U, uSrc);
5379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5380
5381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5382 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5383 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5384 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5385
5386 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5387 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5388
5389 IEM_MC_ADVANCE_RIP_AND_FINISH();
5390 IEM_MC_END();
5391 }
5392 }
5393 /**
5394 * @opdone
5395 * @opmnemonic udvex660fe7reg
5396 * @opcode 0xe7
5397 * @opcodesub 11 mr/reg
5398 * @oppfx 0x66
5399 * @opunused immediate
5400 * @opcpuid avx
5401 * @optest ->
5402 */
5403 else
5404 IEMOP_RAISE_INVALID_OPCODE_RET();
5405}
5406
5407/* Opcode VEX.F3.0F 0xe7 - invalid */
5408/* Opcode VEX.F2.0F 0xe7 - invalid */
5409
5410
5411/* Opcode VEX.0F 0xe8 - invalid */
5412
5413
5414/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5415FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
5416{
5417 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5418 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
5419 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5420}
5421
5422
5423/* Opcode VEX.F3.0F 0xe8 - invalid */
5424/* Opcode VEX.F2.0F 0xe8 - invalid */
5425
5426/* Opcode VEX.0F 0xe9 - invalid */
5427
5428
5429/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5430FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
5431{
5432 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5433 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
5434 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5435}
5436
5437
5438/* Opcode VEX.F3.0F 0xe9 - invalid */
5439/* Opcode VEX.F2.0F 0xe9 - invalid */
5440
5441/* Opcode VEX.0F 0xea - invalid */
5442
5443
5444/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5445FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5446{
5447 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5448 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
5449 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5450}
5451
5452
5453/* Opcode VEX.F3.0F 0xea - invalid */
5454/* Opcode VEX.F2.0F 0xea - invalid */
5455
5456/* Opcode VEX.0F 0xeb - invalid */
5457
5458
5459/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5460FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5461{
5462 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5463 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5464 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5465}
5466
5467
5468
5469/* Opcode VEX.F3.0F 0xeb - invalid */
5470/* Opcode VEX.F2.0F 0xeb - invalid */
5471
5472/* Opcode VEX.0F 0xec - invalid */
5473
5474
5475/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5476FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
5477{
5478 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5479 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
5480 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5481}
5482
5483
5484/* Opcode VEX.F3.0F 0xec - invalid */
5485/* Opcode VEX.F2.0F 0xec - invalid */
5486
5487/* Opcode VEX.0F 0xed - invalid */
5488
5489
5490/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5491FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
5492{
5493 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5494 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
5495 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5496}
5497
5498
5499/* Opcode VEX.F3.0F 0xed - invalid */
5500/* Opcode VEX.F2.0F 0xed - invalid */
5501
5502/* Opcode VEX.0F 0xee - invalid */
5503
5504
5505/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5506FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5507{
5508 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5509 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
5510 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5511}
5512
5513
5514/* Opcode VEX.F3.0F 0xee - invalid */
5515/* Opcode VEX.F2.0F 0xee - invalid */
5516
5517
5518/* Opcode VEX.0F 0xef - invalid */
5519
5520
5521/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5522FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5523{
5524 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5525 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5526 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5527}
5528
5529
5530/* Opcode VEX.F3.0F 0xef - invalid */
5531/* Opcode VEX.F2.0F 0xef - invalid */
5532
5533/* Opcode VEX.0F 0xf0 - invalid */
5534/* Opcode VEX.66.0F 0xf0 - invalid */
5535
5536
5537/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5538FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5539{
5540 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5541 Assert(pVCpu->iem.s.uVexLength <= 1);
5542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5543 if (IEM_IS_MODRM_REG_MODE(bRm))
5544 {
5545 /*
5546 * Register, register - (not implemented, assuming it raises \#UD).
5547 */
5548 IEMOP_RAISE_INVALID_OPCODE_RET();
5549 }
5550 else if (pVCpu->iem.s.uVexLength == 0)
5551 {
5552 /*
5553 * Register, memory128.
5554 */
5555 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5556 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5558
5559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5560 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5561 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5562 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5563
5564 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5565 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5566
5567 IEM_MC_ADVANCE_RIP_AND_FINISH();
5568 IEM_MC_END();
5569 }
5570 else
5571 {
5572 /*
5573 * Register, memory256.
5574 */
5575 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5576 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5578
5579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5580 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5581 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5582 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5583
5584 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5585 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5586
5587 IEM_MC_ADVANCE_RIP_AND_FINISH();
5588 IEM_MC_END();
5589 }
5590}
5591
5592
5593/* Opcode VEX.0F 0xf1 - invalid */
5594/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
5595FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
5596{
5597 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5598 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
5599 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5600}
5601
5602/* Opcode VEX.F2.0F 0xf1 - invalid */
5603
5604/* Opcode VEX.0F 0xf2 - invalid */
5605/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5606FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
5607{
5608 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5609 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
5610 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5611}
5612/* Opcode VEX.F2.0F 0xf2 - invalid */
5613
5614/* Opcode VEX.0F 0xf3 - invalid */
5615/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5616FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
5617{
5618 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5619 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
5620 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5621}
5622/* Opcode VEX.F2.0F 0xf3 - invalid */
5623
5624/* Opcode VEX.0F 0xf4 - invalid */
5625
5626
5627/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
5628FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5629{
5630 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5631 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5632 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5633}
5634
5635
5636/* Opcode VEX.F2.0F 0xf4 - invalid */
5637
5638/* Opcode VEX.0F 0xf5 - invalid */
5639
5640
5641/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5642FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
5643{
5644 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5645 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
5646 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5647}
5648
5649
5650/* Opcode VEX.F2.0F 0xf5 - invalid */
5651
5652/* Opcode VEX.0F 0xf6 - invalid */
5653
5654
5655/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5656FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5657{
5658 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5659 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5660 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5661}
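
/*
 * Illustrative note: VPSADBW sums the absolute differences of each group of
 * eight byte pairs and stores the result zero-extended in the corresponding
 * 64-bit destination element.  A scalar sketch of one such group (reference
 * only; the helper name is hypothetical):
 */
#if 0
static uint64_t iemRefSadQword(uint8_t const *pbSrc1, uint8_t const *pbSrc2)
{
    uint64_t uSum = 0;
    for (unsigned i = 0; i < 8; i++)
        uSum += pbSrc1[i] >= pbSrc2[i] ? (unsigned)(pbSrc1[i] - pbSrc2[i]) : (unsigned)(pbSrc2[i] - pbSrc1[i]);
    return uSum;
}
#endif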
5662
5663
5664/* Opcode VEX.F2.0F 0xf6 - invalid */
5665
5666/* Opcode VEX.0F 0xf7 - invalid */
5667
5668
5669/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5670FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
5671{
5672// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
5673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5674 if (IEM_IS_MODRM_REG_MODE(bRm))
5675 {
5676 /*
5677 * XMM, XMM, (implicit) [ ER]DI
5678 */
5679 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5680 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5681 IEM_MC_LOCAL( uint64_t, u64EffAddr);
5682 IEM_MC_LOCAL( RTUINT128U, u128Mem);
5683 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
5684 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
5685 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
5686 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5687 IEM_MC_PREPARE_AVX_USAGE();
5688
5689 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
5690 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
5691 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5692 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
5693 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
5694 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
5695
5696 IEM_MC_ADVANCE_RIP_AND_FINISH();
5697 IEM_MC_END();
5698 }
5699 else
5700 {
5701 /* The memory, register encoding is invalid. */
5702 IEMOP_RAISE_INVALID_OPCODE_RET();
5703 }
5704}
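
/*
 * Illustrative note: the masked merge above reads the full 16 bytes at [rdi],
 * lets the worker overwrite only the bytes whose mask MSB is set, and writes
 * the block back.  A sketch of that merge step (reference only, not the
 * actual iemAImpl_maskmovdqu_u128 worker; the name is hypothetical):
 */
#if 0
static void iemRefMaskMovMerge(PRTUINT128U puMem, PCRTUINT128U puSrc, PCRTUINT128U puMsk)
{
    for (unsigned iByte = 0; iByte < 16; iByte++)
        if (puMsk->au8[iByte] & 0x80) /* only bytes with the mask MSB set are stored */
            puMem->au8[iByte] = puSrc->au8[iByte];
}
#endif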
5705
5706
5707/* Opcode VEX.F2.0F 0xf7 - invalid */
5708
5709/* Opcode VEX.0F 0xf8 - invalid */
5710
5711
5712/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5713FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5714{
5715 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5716 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
5717 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5718}
5719
5720
5721/* Opcode VEX.F2.0F 0xf8 - invalid */
5722
5723/* Opcode VEX.0F 0xf9 - invalid */
5724
5725
5726/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
5727FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
5728{
5729 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5730 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
5731 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5732}
5733
5734
5735/* Opcode VEX.F2.0F 0xf9 - invalid */
5736
5737/* Opcode VEX.0F 0xfa - invalid */
5738
5739
5740/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
5741FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
5742{
5743 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5744 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
5745 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5746}
5747
5748
5749/* Opcode VEX.F2.0F 0xfa - invalid */
5750
5751/* Opcode VEX.0F 0xfb - invalid */
5752
5753
5754/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
5755FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
5756{
5757 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5758 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
5759 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5760}
5761
5762
5763/* Opcode VEX.F2.0F 0xfb - invalid */
5764
5765/* Opcode VEX.0F 0xfc - invalid */
5766
5767
5768/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
5769FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
5770{
5771 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5772 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
5773 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5774}
5775
5776
5777/* Opcode VEX.F2.0F 0xfc - invalid */
5778
5779/* Opcode VEX.0F 0xfd - invalid */
5780
5781
5782/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
5783FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
5784{
5785 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5786 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
5787 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5788}
5789
5790
5791/* Opcode VEX.F2.0F 0xfd - invalid */
5792
5793/* Opcode VEX.0F 0xfe - invalid */
5794
5795
5796/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
5797FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
5798{
5799 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5800 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
5801 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5802}
5803
5804
5805/* Opcode VEX.F2.0F 0xfe - invalid */
5806
5807
5808/** Opcode **** 0x0f 0xff - UD0 */
5809FNIEMOP_DEF(iemOp_vud0)
5810{
5811/** @todo testcase: vud0 */
5812 IEMOP_MNEMONIC(vud0, "vud0");
5813 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
5814 {
5815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
5816 if (IEM_IS_MODRM_MEM_MODE(bRm))
5817 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
5818 }
5819 IEMOP_HLP_DONE_DECODING();
5820 IEMOP_RAISE_INVALID_OPCODE_RET();
5821}
5822
5823
5824
5825/**
5826 * VEX opcode map \#1.
5827 *
5828 * @sa g_apfnTwoByteMap
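 *
 * The table is laid out with four entries per opcode, one per mandatory
 * prefix column (none, 0x66, 0xF3, 0xF2); the AssertCompile at the end
 * checks the resulting 256 x 4 = 1024 entry count.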
5829 */
5830const PFNIEMOP g_apfnVexMap1[] =
5831{
5832 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
5833 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
5834 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
5835 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
5836 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
5837 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
5838 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
5839 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
5840 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
5841 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
5842 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
5843 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
5844 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
5845 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
5846 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
5847 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
5848 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
5849
5850 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
5851 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
5852 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
5853 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5854 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5855 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5856 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
5857 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5858 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
5859 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
5860 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
5861 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
5862 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
5863 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
5864 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
5865 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
5866
5867 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
5868 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
5869 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
5870 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
5871 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
5872 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
5873 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
5874 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
5875 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5876 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5877 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
5878 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5879 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
5880 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
5881 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5882 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5883
5884 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
5885 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
5886 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
5887 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
5888 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
5889 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
5890 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
5891 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
5892 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5893 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5894 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5895 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5896 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5897 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5898 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5899 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5900
5901 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
5902 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
5903 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
5904 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
5905 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
5906 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
5907 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
5908 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
5909 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
5910 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
5911 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
5912 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
5913 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
5914 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
5915 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
5916 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
5917
5918 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5919 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
5920 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5921 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5922 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5923 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5924 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5925 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5926 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
5927 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
5928 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
5929 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
5930 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
5931 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
5932 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
5933 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
5934
5935 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5936 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5937 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5938 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5939 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5940 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5941 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5942 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5943 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5944 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5945 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5946 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5947 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5948 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5949 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5950 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
5951
5952 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
5953 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5954 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5955 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5956 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5957 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5958 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5959 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5960 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
5961 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
5962 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
5963 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
5964 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
5965 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
5966 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
5967 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
5968
5969 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
5970 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
5971 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
5972 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
5973 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
5974 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
5975 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
5976 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
5977 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
5978 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
5979 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
5980 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
5981 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
5982 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
5983 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
5984 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
5985
5986 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
5987 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
5988 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
5989 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
5990 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
5991 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
5992 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
5993 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
5994 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
5995 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
5996 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
5997 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
5998 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
5999 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
6000 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
6001 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
6002
6003 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
6004 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
6005 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
6006 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
6007 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
6008 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
6009 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
6010 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
6011 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
6012 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
6013 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
6014 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
6015 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
6016 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
6017 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
6018 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
6019
6020 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
6021 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
6022 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
6023 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
6024 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
6025 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
6026 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
6027 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
6028 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
6029 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
6030 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
6031 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
6032 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
6033 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
6034 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
6035 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
6036
6037 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
6038 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
6039 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
6040 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
6041 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
6042 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
6043 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
6044 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
6045 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
6046 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
6047 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
6048 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
6049 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
6050 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
6051 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
6052 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
6053
6054 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
6055 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6056 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6057 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6058 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6059 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6060 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6061 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6062 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6063 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6064 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6065 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6066 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6067 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6068 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6069 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6070
6071 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6072 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6073 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6074 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6075 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6076 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6077 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
6078 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6079 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6080 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6081 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6082 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6083 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6084 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6085 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6086 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6087
6088 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
6089 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6090 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6091 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6092 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6093 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6094 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6095 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6096 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6097 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6098 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6099 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6100 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6101 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6102 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6103 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
6104};
6105AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
6106/** @} */
6107