VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@ 103187

Last change on this file since 103187 was 103185, checked in by vboxsync, 14 months ago

VMM/IEMAllInst*: Liveness analysis, part 2: Flag input & modification annotations. bugref:10372

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 216.9 KB
Line 
1/* $Id: IEMAllInstVexMap1.cpp.h 103185 2024-02-04 15:42:48Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
54 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
55 IEM_MC_LOCAL(RTUINT256U, uDst);
56 IEM_MC_LOCAL(RTUINT256U, uSrc1);
57 IEM_MC_LOCAL(RTUINT256U, uSrc2);
58 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
59 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
60 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
61 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
62 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
63 IEM_MC_PREPARE_AVX_USAGE();
64 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
65 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
66 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
67 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
68 IEM_MC_ADVANCE_RIP_AND_FINISH();
69 IEM_MC_END();
70 }
71 else
72 {
73 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
74 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
75 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
76 IEM_MC_ARG(PRTUINT128U, puDst, 1);
77 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
78 IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
79 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
80 IEM_MC_PREPARE_AVX_USAGE();
81 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
82 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
83 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
84 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
85 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
86 IEM_MC_ADVANCE_RIP_AND_FINISH();
87 IEM_MC_END();
88 }
89 }
90 else
91 {
92 /*
93 * Register, memory.
94 */
95 if (pVCpu->iem.s.uVexLength)
96 {
97 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
98 IEM_MC_LOCAL(RTUINT256U, uDst);
99 IEM_MC_LOCAL(RTUINT256U, uSrc1);
100 IEM_MC_LOCAL(RTUINT256U, uSrc2);
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
102 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
103 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
104 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
105 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
106
107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
108 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
109 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
110 IEM_MC_PREPARE_AVX_USAGE();
111
112 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
113 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
114 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
115 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
116
117 IEM_MC_ADVANCE_RIP_AND_FINISH();
118 IEM_MC_END();
119 }
120 else
121 {
122 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
123 IEM_MC_LOCAL(RTUINT128U, uSrc2);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
126 IEM_MC_ARG(PRTUINT128U, puDst, 1);
127 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
128 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);
129
130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
131 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
132 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
133 IEM_MC_PREPARE_AVX_USAGE();
134
135 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
136 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
137 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
138 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
139 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
140
141 IEM_MC_ADVANCE_RIP_AND_FINISH();
142 IEM_MC_END();
143 }
144 }
145}
146
147
148/**
149 * Common worker for AVX2 instructions on the forms:
150 * - vpxxx xmm0, xmm1, xmm2/mem128
151 * - vpxxx ymm0, ymm1, ymm2/mem256
152 *
153 * Takes function table for function w/o implicit state parameter.
154 *
155 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
156 */
157FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
158{
159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
160 if (IEM_IS_MODRM_REG_MODE(bRm))
161 {
162 /*
163 * Register, register.
164 */
165 if (pVCpu->iem.s.uVexLength)
166 {
167 IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
168 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
169 IEM_MC_LOCAL(RTUINT256U, uDst);
170 IEM_MC_LOCAL(RTUINT256U, uSrc1);
171 IEM_MC_LOCAL(RTUINT256U, uSrc2);
172 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
173 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
174 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
175 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
176 IEM_MC_PREPARE_AVX_USAGE();
177 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
178 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
179 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
180 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
181 IEM_MC_ADVANCE_RIP_AND_FINISH();
182 IEM_MC_END();
183 }
184 else
185 {
186 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
187 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
188 IEM_MC_ARG(PRTUINT128U, puDst, 0);
189 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
190 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
192 IEM_MC_PREPARE_AVX_USAGE();
193 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
194 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
195 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
196 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
197 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
198 IEM_MC_ADVANCE_RIP_AND_FINISH();
199 IEM_MC_END();
200 }
201 }
202 else
203 {
204 /*
205 * Register, memory.
206 */
207 if (pVCpu->iem.s.uVexLength)
208 {
209 IEM_MC_BEGIN(3, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
210 IEM_MC_LOCAL(RTUINT256U, uDst);
211 IEM_MC_LOCAL(RTUINT256U, uSrc1);
212 IEM_MC_LOCAL(RTUINT256U, uSrc2);
213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
214 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
215 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
216 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
217
218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
219 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
220 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
221 IEM_MC_PREPARE_AVX_USAGE();
222
223 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
224 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
225 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
226 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
227
228 IEM_MC_ADVANCE_RIP_AND_FINISH();
229 IEM_MC_END();
230 }
231 else
232 {
233 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
234 IEM_MC_LOCAL(RTUINT128U, uSrc2);
235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
236 IEM_MC_ARG(PRTUINT128U, puDst, 0);
237 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
238 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
239
240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
241 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
242 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
243 IEM_MC_PREPARE_AVX_USAGE();
244
245 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
246 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
247 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
248 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
249 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
250
251 IEM_MC_ADVANCE_RIP_AND_FINISH();
252 IEM_MC_END();
253 }
254 }
255}
256
257
258/**
259 * Common worker for AVX2 instructions on the forms:
260 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
261 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
262 *
263 * The 128-bit memory version of this instruction may elect to skip fetching the
264 * lower 64 bits of the operand. We, however, do not.
265 *
266 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
267 */
268FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
269{
270 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
271}
272
273
274/**
275 * Common worker for AVX2 instructions on the forms:
276 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
277 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
278 *
279 * The 128-bit memory version of this instruction may elect to skip fetching the
280 * higher 64 bits of the operand. We, however, do not.
281 *
282 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
283 */
284FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
285{
286 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
287}
288
289
290/**
291 * Common worker for AVX2 instructions on the forms:
292 * - vpxxx xmm0, xmm1/mem128
293 * - vpxxx ymm0, ymm1/mem256
294 *
295 * Takes function table for function w/o implicit state parameter.
296 *
297 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
298 */
299FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
300{
301 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
302 if (IEM_IS_MODRM_REG_MODE(bRm))
303 {
304 /*
305 * Register, register.
306 */
307 if (pVCpu->iem.s.uVexLength)
308 {
309 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
310 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
311 IEM_MC_LOCAL(RTUINT256U, uDst);
312 IEM_MC_LOCAL(RTUINT256U, uSrc);
313 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
314 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
315 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
316 IEM_MC_PREPARE_AVX_USAGE();
317 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
318 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
319 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
320 IEM_MC_ADVANCE_RIP_AND_FINISH();
321 IEM_MC_END();
322 }
323 else
324 {
325 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
326 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
327 IEM_MC_ARG(PRTUINT128U, puDst, 0);
328 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
329 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
330 IEM_MC_PREPARE_AVX_USAGE();
331 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
332 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
333 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
334 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
335 IEM_MC_ADVANCE_RIP_AND_FINISH();
336 IEM_MC_END();
337 }
338 }
339 else
340 {
341 /*
342 * Register, memory.
343 */
344 if (pVCpu->iem.s.uVexLength)
345 {
346 IEM_MC_BEGIN(2, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
347 IEM_MC_LOCAL(RTUINT256U, uDst);
348 IEM_MC_LOCAL(RTUINT256U, uSrc);
349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
350 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
351 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
352
353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
354 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
355 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
356 IEM_MC_PREPARE_AVX_USAGE();
357
358 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
359 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
360 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
361
362 IEM_MC_ADVANCE_RIP_AND_FINISH();
363 IEM_MC_END();
364 }
365 else
366 {
367 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
368 IEM_MC_LOCAL(RTUINT128U, uSrc);
369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
370 IEM_MC_ARG(PRTUINT128U, puDst, 0);
371 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
372
373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
374 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
375 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
376 IEM_MC_PREPARE_AVX_USAGE();
377
378 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
379 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
380 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
381 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
382
383 IEM_MC_ADVANCE_RIP_AND_FINISH();
384 IEM_MC_END();
385 }
386 }
387}
388
389
390/* Opcode VEX.0F 0x00 - invalid */
391/* Opcode VEX.0F 0x01 - invalid */
392/* Opcode VEX.0F 0x02 - invalid */
393/* Opcode VEX.0F 0x03 - invalid */
394/* Opcode VEX.0F 0x04 - invalid */
395/* Opcode VEX.0F 0x05 - invalid */
396/* Opcode VEX.0F 0x06 - invalid */
397/* Opcode VEX.0F 0x07 - invalid */
398/* Opcode VEX.0F 0x08 - invalid */
399/* Opcode VEX.0F 0x09 - invalid */
400/* Opcode VEX.0F 0x0a - invalid */
401
/** Opcode VEX.0F 0x0b.
 * VEX-encoded UD2: unconditionally raises an invalid-opcode exception (\#UD). */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
408
409/* Opcode VEX.0F 0x0c - invalid */
410/* Opcode VEX.0F 0x0d - invalid */
411/* Opcode VEX.0F 0x0e - invalid */
412/* Opcode VEX.0F 0x0f - invalid */
413
414
415/**
416 * @opcode 0x10
417 * @oppfx none
418 * @opcpuid avx
419 * @opgroup og_avx_simdfp_datamove
420 * @opxcpttype 4UA
421 * @optest op1=1 op2=2 -> op1=2
422 * @optest op1=0 op2=-22 -> op1=-22
423 */
424FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
425{
426 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
427 Assert(pVCpu->iem.s.uVexLength <= 1);
428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
429 if (IEM_IS_MODRM_REG_MODE(bRm))
430 {
431 /*
432 * Register, register.
433 */
434 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
435 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
436 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
437 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
438 if (pVCpu->iem.s.uVexLength == 0)
439 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
440 IEM_GET_MODRM_RM(pVCpu, bRm));
441 else
442 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
443 IEM_GET_MODRM_RM(pVCpu, bRm));
444 IEM_MC_ADVANCE_RIP_AND_FINISH();
445 IEM_MC_END();
446 }
447 else if (pVCpu->iem.s.uVexLength == 0)
448 {
449 /*
450 * 128-bit: Register, Memory
451 */
452 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
453 IEM_MC_LOCAL(RTUINT128U, uSrc);
454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
455
456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
457 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
458 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
459 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
460
461 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
462 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
463
464 IEM_MC_ADVANCE_RIP_AND_FINISH();
465 IEM_MC_END();
466 }
467 else
468 {
469 /*
470 * 256-bit: Register, Memory
471 */
472 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
473 IEM_MC_LOCAL(RTUINT256U, uSrc);
474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
475
476 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
477 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
478 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
479 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
480
481 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
482 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
483
484 IEM_MC_ADVANCE_RIP_AND_FINISH();
485 IEM_MC_END();
486 }
487}
488
489
490/**
491 * @opcode 0x10
492 * @oppfx 0x66
493 * @opcpuid avx
494 * @opgroup og_avx_simdfp_datamove
495 * @opxcpttype 4UA
496 * @optest op1=1 op2=2 -> op1=2
497 * @optest op1=0 op2=-22 -> op1=-22
498 */
499FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
500{
501 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
502 Assert(pVCpu->iem.s.uVexLength <= 1);
503 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
504 if (IEM_IS_MODRM_REG_MODE(bRm))
505 {
506 /*
507 * Register, register.
508 */
509 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
510 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
511 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
512 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
513 if (pVCpu->iem.s.uVexLength == 0)
514 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
515 IEM_GET_MODRM_RM(pVCpu, bRm));
516 else
517 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
518 IEM_GET_MODRM_RM(pVCpu, bRm));
519 IEM_MC_ADVANCE_RIP_AND_FINISH();
520 IEM_MC_END();
521 }
522 else if (pVCpu->iem.s.uVexLength == 0)
523 {
524 /*
525 * 128-bit: Memory, register.
526 */
527 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
528 IEM_MC_LOCAL(RTUINT128U, uSrc);
529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
530
531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
532 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
533 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
534 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
535
536 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
537 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
538
539 IEM_MC_ADVANCE_RIP_AND_FINISH();
540 IEM_MC_END();
541 }
542 else
543 {
544 /*
545 * 256-bit: Memory, register.
546 */
547 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
548 IEM_MC_LOCAL(RTUINT256U, uSrc);
549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
550
551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
552 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
553 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
554 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
555
556 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
557 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
558
559 IEM_MC_ADVANCE_RIP_AND_FINISH();
560 IEM_MC_END();
561 }
562}
563
564
/** Handles VEX.F3.0F 0x10: vmovss - register merge form and memory load form. */
FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x10
         * @oppfx       0xf3
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0  op3=2    -> op1=2
         * @optest      op1=0 op2=0  op3=-22  -> op1=0xffffffea
         * @optest      op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest      op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note        HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* dst[31:0] = rm[31:0]; dst[127:32] = vvvv[127:32]; upper bits zeroed. */
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x10
         * @oppfx       0xf3
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovss_Vss_Hss_Wss
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        /* Memory form loads 32 bits and zero-extends to the whole YMM register. */
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
626
627
/** Handles VEX.F2.0F 0x10: vmovsd - register merge form and memory load form. */
FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x10
         * @oppfx       0xf2
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0  op3=2    -> op1=2
         * @optest      op1=0 op2=0  op3=-22  -> op1=0xffffffffffffffea
         * @optest      op1=3 op2=-1 op3=0x77 ->
         *              op1=0xffffffffffffffff0000000000000077
         * @optest      op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* dst[63:0] = rm[63:0]; dst[127:64] = vvvv[127:64]; upper bits zeroed. */
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x10
         * @oppfx       0xf2
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        /* Memory form loads 64 bits and zero-extends to the whole YMM register. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
690
691
692/**
693 * @opcode 0x11
694 * @oppfx none
695 * @opcpuid avx
696 * @opgroup og_avx_simdfp_datamove
697 * @opxcpttype 4UA
698 * @optest op1=1 op2=2 -> op1=2
699 * @optest op1=0 op2=-22 -> op1=-22
700 */
701FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
702{
703 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
704 Assert(pVCpu->iem.s.uVexLength <= 1);
705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
706 if (IEM_IS_MODRM_REG_MODE(bRm))
707 {
708 /*
709 * Register, register.
710 */
711 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
712 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
713 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
714 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
715 if (pVCpu->iem.s.uVexLength == 0)
716 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
717 IEM_GET_MODRM_REG(pVCpu, bRm));
718 else
719 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
720 IEM_GET_MODRM_REG(pVCpu, bRm));
721 IEM_MC_ADVANCE_RIP_AND_FINISH();
722 IEM_MC_END();
723 }
724 else if (pVCpu->iem.s.uVexLength == 0)
725 {
726 /*
727 * 128-bit: Memory, register.
728 */
729 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
730 IEM_MC_LOCAL(RTUINT128U, uSrc);
731 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
732
733 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
734 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
735 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
736 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
737
738 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
739 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
740
741 IEM_MC_ADVANCE_RIP_AND_FINISH();
742 IEM_MC_END();
743 }
744 else
745 {
746 /*
747 * 256-bit: Memory, register.
748 */
749 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
750 IEM_MC_LOCAL(RTUINT256U, uSrc);
751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
752
753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
754 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
755 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
756 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
757
758 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
759 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
760
761 IEM_MC_ADVANCE_RIP_AND_FINISH();
762 IEM_MC_END();
763 }
764}
765
766
767/**
768 * @opcode 0x11
769 * @oppfx 0x66
770 * @opcpuid avx
771 * @opgroup og_avx_simdfp_datamove
772 * @opxcpttype 4UA
773 * @optest op1=1 op2=2 -> op1=2
774 * @optest op1=0 op2=-22 -> op1=-22
775 */
776FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
777{
778 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
779 Assert(pVCpu->iem.s.uVexLength <= 1);
780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
781 if (IEM_IS_MODRM_REG_MODE(bRm))
782 {
783 /*
784 * Register, register.
785 */
786 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
787 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
788 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
789 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
790 if (pVCpu->iem.s.uVexLength == 0)
791 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
792 IEM_GET_MODRM_REG(pVCpu, bRm));
793 else
794 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
795 IEM_GET_MODRM_REG(pVCpu, bRm));
796 IEM_MC_ADVANCE_RIP_AND_FINISH();
797 IEM_MC_END();
798 }
799 else if (pVCpu->iem.s.uVexLength == 0)
800 {
801 /*
802 * 128-bit: Memory, register.
803 */
804 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
805 IEM_MC_LOCAL(RTUINT128U, uSrc);
806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
807
808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
809 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
810 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
811 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
812
813 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
814 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
815
816 IEM_MC_ADVANCE_RIP_AND_FINISH();
817 IEM_MC_END();
818 }
819 else
820 {
821 /*
822 * 256-bit: Memory, register.
823 */
824 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
825 IEM_MC_LOCAL(RTUINT256U, uSrc);
826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
827
828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
829 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
830 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
831 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
832
833 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
834 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
835
836 IEM_MC_ADVANCE_RIP_AND_FINISH();
837 IEM_MC_END();
838 }
839}
840
841
/** Handles VEX.F3.0F 0x11: vmovss - register merge form and memory store form. */
FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x11
         * @oppfx       0xf3
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0  op3=2    -> op1=2
         * @optest      op1=0 op2=0  op3=-22  -> op1=0xffffffea
         * @optest      op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest      op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* Note: mod/rm is the *destination* here (MVR operand order). */
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x11
         * @oppfx       0xf3
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovss_Vss_Hss_Wss
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        /* Store form only reads guest AVX state. */
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
903
904
/** Handles VEX.F2.0F 0x11: vmovsd - register merge form and memory store form. */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x11
         * @oppfx       0xf2
         * @opcodesub   11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  5
         * @optest      op1=1 op2=0  op3=2    -> op1=2
         * @optest      op1=0 op2=0  op3=-22  -> op1=0xffffffffffffffea
         * @optest      op1=3 op2=-1 op3=0x77 ->
         *              op1=0xffffffffffffffff0000000000000077
         * @optest      op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* Note: mod/rm is the *destination* here (MVR operand order). */
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x11
         * @oppfx       0xf2
         * @opcodesub   !11 mr/reg
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest      op1=1 op2=2 -> op1=2
         * @optest      op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        /* Store form only reads guest AVX state. */
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
967
968
/**
 * VEX.0F 0x12 - vmovhlps (register form) / vmovlps (memory form).
 *
 * Register form copies the high qword of U (ModRM.rm) into the low qword of
 * the destination, keeping the high qword from H (VEX.vvvv); memory form
 * loads the low qword from memory instead.  VEX.L must be zero.
 */
FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x12
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  7LZ
         * @optest      op2=0x2200220122022203
         *              op3=0x3304330533063307
         *              -> op1=0x22002201220222033304330533063307
         * @optest      op2=-1 op3=-42 -> op1=-42
         * @note        op3 and op2 are only the 8-byte high XMM register halfs.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx); /* VEX.L must be 0 */

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* dst.lo = rm.hi, dst.hi = vvvv.hi, upper YMM lanes zeroed. */
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x12
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5LZ
         * @opfunction  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest      op1=1 op2=0 op3=0 -> op1=0
         * @optest      op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest      op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest      op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        /* dst.lo = mem64, dst.hi = vvvv.hi, upper YMM lanes zeroed. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1036
1037
1038/**
1039 * @opcode 0x12
1040 * @opcodesub !11 mr/reg
1041 * @oppfx 0x66
1042 * @opcpuid avx
1043 * @opgroup og_avx_pcksclr_datamerge
1044 * @opxcpttype 5LZ
1045 * @optest op2=0 op3=2 -> op1=2
1046 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1047 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1048 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1049 */
/* VEX.66.0F 0x12 - vmovlpd: load low qword from memory, high qword from
   VEX.vvvv; the register form (mod=11) is undefined and raises #UD. */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx); /* VEX.L must be 0 */
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        /* dst.lo = mem64, dst.hi = vvvv.hi, upper YMM lanes zeroed. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f12m3
     * @opcode      0x12
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
1088
1089
1090/**
1091 * @opcode 0x12
1092 * @oppfx 0xf3
1093 * @opcpuid avx
1094 * @opgroup og_avx_pcksclr_datamove
1095 * @opxcpttype 4
1096 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1097 * -> op1=0x00000002000000020000000100000001
1098 * @optest vex.l==1 /
1099 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1100 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1101 */
/**
 * VEX.F3.0F 0x12 - vmovsldup: duplicate the even-indexed (low) dwords of the
 * source into each dword pair of the destination.
 *
 * 128-bit form is done inline (dwords 0,0,2,2); the 256-bit form is delegated
 * to the iemAImpl_vmovsldup_256_* assembly/fallback workers.
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U,                  uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            /* dst dwords 0..3 = src dwords 0,0,2,2; upper YMM lanes zeroed. */
            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t,   iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t,   iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm),  2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U,                  uSrc);
            IEM_MC_LOCAL(RTGCPTR,                     GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            /* Same dword duplication as the register form, source from memory. */
            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U,            uSrc);
            IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t,           iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U,  puSrc, uSrc,                             2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
1195
1196
1197/**
1198 * @opcode 0x12
1199 * @oppfx 0xf2
1200 * @opcpuid avx
1201 * @opgroup og_avx_pcksclr_datamove
1202 * @opxcpttype 5
1203 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1204 * -> op1=0x22222222111111112222222211111111
1205 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1206 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1207 */
1208FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1209{
1210 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
1211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1212 if (IEM_IS_MODRM_REG_MODE(bRm))
1213 {
1214 /*
1215 * Register, register.
1216 */
1217 if (pVCpu->iem.s.uVexLength == 0)
1218 {
1219 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
1220 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1221 IEM_MC_LOCAL(uint64_t, uSrc);
1222
1223 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1224 IEM_MC_PREPARE_AVX_USAGE();
1225
1226 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1227 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1228 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1229 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1230
1231 IEM_MC_ADVANCE_RIP_AND_FINISH();
1232 IEM_MC_END();
1233 }
1234 else
1235 {
1236 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1237 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1238 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1239 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1240 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1241
1242 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1243 IEM_MC_PREPARE_AVX_USAGE();
1244 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);
1245
1246 IEM_MC_ADVANCE_RIP_AND_FINISH();
1247 IEM_MC_END();
1248 }
1249 }
1250 else
1251 {
1252 /*
1253 * Register, memory.
1254 */
1255 if (pVCpu->iem.s.uVexLength == 0)
1256 {
1257 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1259 IEM_MC_LOCAL(uint64_t, uSrc);
1260
1261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1262 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1263 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1264 IEM_MC_PREPARE_AVX_USAGE();
1265
1266 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1267 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1268 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1269 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1270
1271 IEM_MC_ADVANCE_RIP_AND_FINISH();
1272 IEM_MC_END();
1273 }
1274 else
1275 {
1276 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1277 IEM_MC_LOCAL(RTUINT256U, uSrc);
1278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1279 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1280 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1281 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1282
1283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1284 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1285 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1286 IEM_MC_PREPARE_AVX_USAGE();
1287
1288 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1289 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);
1290
1291 IEM_MC_ADVANCE_RIP_AND_FINISH();
1292 IEM_MC_END();
1293 }
1294 }
1295}
1296
1297
1298/**
1299 * @opcode 0x13
1300 * @opcodesub !11 mr/reg
1301 * @oppfx none
1302 * @opcpuid avx
1303 * @opgroup og_avx_simdfp_datamove
1304 * @opxcpttype 5
1305 * @optest op1=1 op2=2 -> op1=2
1306 * @optest op1=0 op2=-42 -> op1=-42
1307 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx); /* VEX.L=0 and vvvv=1111b required */
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        /* Store the low qword of the source register to memory. */
        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex0f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
1344
1345
1346/**
1347 * @opcode 0x13
1348 * @opcodesub !11 mr/reg
1349 * @oppfx 0x66
1350 * @opcpuid avx
1351 * @opgroup og_avx_pcksclr_datamove
1352 * @opxcpttype 5
1353 * @optest op1=1 op2=2 -> op1=2
1354 * @optest op1=0 op2=-42 -> op1=-42
1355 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx); /* VEX.L=0 and vvvv=1111b required */
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        /* Store the low qword of the source register to memory. */
        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f13m3
     * @opcode      0x13
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
1391
1392/* Opcode VEX.F3.0F 0x13 - invalid */
1393/* Opcode VEX.F2.0F 0x13 - invalid */
1394
1395/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
/* Interleave low packed singles of H and W into V; AVX for L=0, AVX2 for L=1
   (checked by the common worker). */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Dispatch to the common worker with the host-optimized or C fallback table. */
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
1402
1403
1404/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
/* Interleave low packed doubles of H and W into V; AVX for L=0, AVX2 for L=1
   (checked by the common worker). */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Dispatch to the common worker with the host-optimized or C fallback table. */
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
1411
1412
1413/* Opcode VEX.F3.0F 0x14 - invalid */
1414/* Opcode VEX.F2.0F 0x14 - invalid */
1415
1416
1417/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
/* Interleave high packed singles of H and W into V; AVX for L=0, AVX2 for L=1
   (checked by the common worker). */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Dispatch to the common worker with the host-optimized or C fallback table. */
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
1424
1425
1426/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
/* Interleave high packed doubles of H and W into V; AVX for L=0, AVX2 for L=1
   (checked by the common worker). */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Dispatch to the common worker with the host-optimized or C fallback table. */
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
1433
1434
1435/* Opcode VEX.F3.0F 0x15 - invalid */
1436/* Opcode VEX.F2.0F 0x15 - invalid */
1437
1438
/**
 * VEX.0F 0x16 - vmovlhps (register form) / vmovhps (memory form).
 *
 * Both forms build the destination from the low qword of H (VEX.vvvv) and a
 * second qword: the low qword of U (ModRM.rm) for vmovlhps, or a qword from
 * memory for vmovhps.  VEX.L must be zero.
 */
FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode      0x16
         * @opcodesub   11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamerge
         * @opxcpttype  7LZ
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx); /* VEX.L must be 0 */

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* dst.lo = vvvv.lo, dst.hi = rm.lo, upper YMM lanes zeroed. */
        IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   !11 mr/reg
         * @oppfx       none
         * @opcpuid     avx
         * @opgroup     og_avx_simdfp_datamove
         * @opxcpttype  5LZ
         * @opfunction  iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        /* dst.lo = vvvv.lo, dst.hi = mem64, upper YMM lanes zeroed. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1498
1499
1500/**
1501 * @opcode 0x16
1502 * @opcodesub !11 mr/reg
1503 * @oppfx 0x66
1504 * @opcpuid avx
1505 * @opgroup og_avx_pcksclr_datamerge
1506 * @opxcpttype 5LZ
1507 */
/* VEX.66.0F 0x16 - vmovhpd: load high qword from memory, low qword from
   VEX.vvvv; the register form (mod=11) is undefined and raises #UD. */
FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx); /* VEX.L must be 0 */
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        /* dst.lo = vvvv.lo, dst.hi = mem64, upper YMM lanes zeroed. */
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f16m3
     * @opcode      0x16
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
1546
1547
1548/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1549/**
1550 * @opcode 0x16
1551 * @oppfx 0xf3
1552 * @opcpuid avx
1553 * @opgroup og_avx_pcksclr_datamove
1554 * @opxcpttype 4
1555 */
/**
 * VEX.F3.0F 0x16 - vmovshdup: duplicate the odd-indexed (high) dwords of the
 * source into each dword pair of the destination.
 *
 * 128-bit form is done inline (dwords 1,1,3,3); the 256-bit form is delegated
 * to the iemAImpl_vmovshdup_256_* assembly/fallback workers.
 */
FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U,                  uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            /* dst dwords 0..3 = src dwords 1,1,3,3; upper YMM lanes zeroed. */
            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t,   iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t,   iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm),  2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U,                  uSrc);
            IEM_MC_LOCAL(RTGCPTR,                     GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            /* Same dword duplication as the register form, source from memory. */
            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U,            uSrc);
            IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t,           iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U,  puSrc, uSrc,                             2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
1649
1650
1651/* Opcode VEX.F2.0F 0x16 - invalid */
1652
1653
1654/**
1655 * @opcode 0x17
1656 * @opcodesub !11 mr/reg
1657 * @oppfx none
1658 * @opcpuid avx
1659 * @opgroup og_avx_simdfp_datamove
1660 * @opxcpttype 5
1661 */
FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx); /* VEX.L=0 and vvvv=1111b required */
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        /* Store the second (high) qword of the source register to memory. */
        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex0f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
1698
1699
1700/**
1701 * @opcode 0x17
1702 * @opcodesub !11 mr/reg
1703 * @oppfx 0x66
1704 * @opcpuid avx
1705 * @opgroup og_avx_pcksclr_datamove
1706 * @opxcpttype 5
1707 */
FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t,                uSrc);
        IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx); /* VEX.L=0 and vvvv=1111b required */
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        /* Store the second (high) qword of the source register to memory. */
        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  udvex660f17m3
     * @opcode      0x17
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     avx
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
1744
1745
1746/* Opcode VEX.F3.0F 0x17 - invalid */
1747/* Opcode VEX.F2.0F 0x17 - invalid */
1748
1749
1750/* Opcode VEX.0F 0x18 - invalid */
1751/* Opcode VEX.0F 0x19 - invalid */
1752/* Opcode VEX.0F 0x1a - invalid */
1753/* Opcode VEX.0F 0x1b - invalid */
1754/* Opcode VEX.0F 0x1c - invalid */
1755/* Opcode VEX.0F 0x1d - invalid */
1756/* Opcode VEX.0F 0x1e - invalid */
1757/* Opcode VEX.0F 0x1f - invalid */
1758
1759/* Opcode VEX.0F 0x20 - invalid */
1760/* Opcode VEX.0F 0x21 - invalid */
1761/* Opcode VEX.0F 0x22 - invalid */
1762/* Opcode VEX.0F 0x23 - invalid */
1763/* Opcode VEX.0F 0x24 - invalid */
1764/* Opcode VEX.0F 0x25 - invalid */
1765/* Opcode VEX.0F 0x26 - invalid */
1766/* Opcode VEX.0F 0x27 - invalid */
1767
1768/**
1769 * @opcode 0x28
1770 * @oppfx none
1771 * @opcpuid avx
1772 * @opgroup og_avx_pcksclr_datamove
1773 * @opxcpttype 1
1774 * @optest op1=1 op2=2 -> op1=2
1775 * @optest op1=0 op2=-42 -> op1=-42
1776 * @note Almost identical to vmovapd.
1777 */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* Copy width follows VEX.L; upper YMM lanes are zeroed either way. */
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U,              uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            /* Alignment-checked 128-bit load. */
            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U,              uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            /* Alignment-checked 256-bit load. */
            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
1843
1844
1845/**
1846 * @opcode 0x28
1847 * @oppfx 66
1848 * @opcpuid avx
1849 * @opgroup og_avx_pcksclr_datamove
1850 * @opxcpttype 1
1851 * @optest op1=1 op2=2 -> op1=2
1852 * @optest op1=0 op2=-42 -> op1=-42
1853 * @note Almost identical to vmovaps
1854 */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* Copy width follows VEX.L; upper YMM lanes are zeroed either way. */
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U,              uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            /* Alignment-checked 128-bit load. */
            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U,              uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            /* Alignment-checked 256-bit load. */
            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
1920
1921/**
1922 * @opmnemonic udvexf30f28
1923 * @opcode 0x28
1924 * @oppfx 0xf3
1925 * @opunused vex.modrm
1926 * @opcpuid avx
1927 * @optest ->
1928 * @opdone
1929 */
1930
1931/**
1932 * @opmnemonic udvexf20f28
1933 * @opcode 0x28
1934 * @oppfx 0xf2
1935 * @opunused vex.modrm
1936 * @opcpuid avx
1937 * @optest ->
1938 * @opdone
1939 */
1940
1941/**
1942 * @opcode 0x29
1943 * @oppfx none
1944 * @opcpuid avx
1945 * @opgroup og_avx_pcksclr_datamove
1946 * @opxcpttype 1
1947 * @optest op1=1 op2=2 -> op1=2
1948 * @optest op1=0 op2=-42 -> op1=-42
1949 * @note Almost identical to vmovapd.
1950 */
FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* Store direction: ModRM.rm is the destination here (MR encoding). */
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U,              uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            /* Alignment-checked 128-bit store. */
            IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U,              uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            /* Alignment-checked 256-bit store. */
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
2016
2017/**
2018 * @opcode 0x29
2019 * @oppfx 66
2020 * @opcpuid avx
2021 * @opgroup og_avx_pcksclr_datamove
2022 * @opxcpttype 1
2023 * @optest op1=1 op2=2 -> op1=2
2024 * @optest op1=0 op2=-42 -> op1=-42
2025 * @note Almost identical to vmovaps
2026 */
/**
 * vmovapd store form: copy packed double-precision data from the XMM/YMM
 * register named by ModRM.reg to the register or aligned memory destination
 * named by ModRM.rm.  Mirrors iemOp_vmovaps_Wps_Vps (the move is type-blind).
 */
FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1); /* VEX.L is a single bit: 0=128-bit, 1=256-bit. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE(); /* destination register is written (ZX_VLMAX zero-extends the upper bits) */
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            /* The effective address must be calculated before the decoding-done marker. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ(); /* only the source register is read; memory is the destination */

            IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); /* alignment-checking store */

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
2092
2093
2094/**
2095 * @opmnemonic udvexf30f29
2096 * @opcode 0x29
2097 * @oppfx 0xf3
2098 * @opunused vex.modrm
2099 * @opcpuid avx
2100 * @optest ->
2101 * @opdone
2102 */
2103
2104/**
2105 * @opmnemonic udvexf20f29
2106 * @opcode 0x29
2107 * @oppfx 0xf2
2108 * @opunused vex.modrm
2109 * @opcpuid avx
2110 * @optest ->
2111 * @opdone
2112 */
2113
2114
2115/** Opcode VEX.0F 0x2a - invalid */
2116/** Opcode VEX.66.0F 0x2a - invalid */
2117/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2118FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2119/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2120FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2121
2122
2123/**
2124 * @opcode 0x2b
2125 * @opcodesub !11 mr/reg
2126 * @oppfx none
2127 * @opcpuid avx
2128 * @opgroup og_avx_cachect
2129 * @opxcpttype 1
2130 * @optest op1=1 op2=2 -> op1=2
2131 * @optest op1=0 op2=-42 -> op1=-42
2132 * @note Identical implementation to vmovntpd
2133 */
/**
 * vmovntps: non-temporal store of packed singles to aligned memory.
 * Only the memory destination encoding is valid; ModRM register mode is #UD.
 * NOTE(review): the non-temporal hint itself is not modelled here — the store
 * is performed as a regular aligned store.
 */
FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1); /* VEX.L is a single bit. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            /* Effective address first, then the decoding-done marker. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); /* alignment-checking store */

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    /* The register, register encoding is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2183
2184/**
2185 * @opcode 0x2b
2186 * @opcodesub !11 mr/reg
2187 * @oppfx 0x66
2188 * @opcpuid avx
2189 * @opgroup og_avx_cachect
2190 * @opxcpttype 1
2191 * @optest op1=1 op2=2 -> op1=2
2192 * @optest op1=0 op2=-42 -> op1=-42
2193 * @note Identical implementation to vmovntps
2194 */
/**
 * vmovntpd: non-temporal store of packed doubles to aligned memory.
 * Identical implementation to iemOp_vmovntps_Mps_Vps (the store is type-blind);
 * ModRM register mode raises #UD.
 */
FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1); /* VEX.L is a single bit. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /*
         * memory, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            /* Effective address first, then the decoding-done marker. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); /* alignment-checking store */

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    /* The register, register encoding is invalid. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2244
2245/**
2246 * @opmnemonic udvexf30f2b
2247 * @opcode 0x2b
2248 * @oppfx 0xf3
2249 * @opunused vex.modrm
2250 * @opcpuid avx
2251 * @optest ->
2252 * @opdone
2253 */
2254
2255/**
2256 * @opmnemonic udvexf20f2b
2257 * @opcode 0x2b
2258 * @oppfx 0xf2
2259 * @opunused vex.modrm
2260 * @opcpuid avx
2261 * @optest ->
2262 * @opdone
2263 */
2264
2265
2266/* Opcode VEX.0F 0x2c - invalid */
2267/* Opcode VEX.66.0F 0x2c - invalid */
2268/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2269FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2270/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2271FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2272
2273/* Opcode VEX.0F 0x2d - invalid */
2274/* Opcode VEX.66.0F 0x2d - invalid */
2275/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2276FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2277/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2278FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2279
2280
2281/**
2282 * @opcode 0x2e
2283 * @oppfx none
 * @opflmodify cf,pf,af,zf,sf,of
2285 * @opflclear af,sf,of
2286 */
/**
 * vucomiss: unordered compare of the low single-precision elements of
 * Vss and Wss, setting EFLAGS.  VEX.L must be zero and VEX.vvvv unused.
 * EFLAGS are only committed when no MXCSR exception is pending.
 */
FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
                                 pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Only commit the flags if the worker didn't flag a pending SIMD FP exception. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        /* Scalar single: only 32 bits are fetched from memory. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
                                 pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2355
2356
2357/**
2358 * @opcode 0x2e
2359 * @oppfx 0x66
 * @opflmodify cf,pf,af,zf,sf,of
2361 * @opflclear af,sf,of
2362 */
2363FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2364{
2365 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2367 if (IEM_IS_MODRM_REG_MODE(bRm))
2368 {
2369 /*
2370 * Register, register.
2371 */
2372 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2373 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2374 IEM_MC_LOCAL(uint32_t, fEFlags);
2375 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2376 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2377 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2378 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2379 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2380 IEM_MC_PREPARE_AVX_USAGE();
2381 IEM_MC_FETCH_EFLAGS(fEFlags);
2382 IEM_MC_REF_MXCSR(pfMxcsr);
2383 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2384 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2385 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2386 pfMxcsr, pEFlags, puSrc1, puSrc2);
2387 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2388 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2389 } IEM_MC_ELSE() {
2390 IEM_MC_COMMIT_EFLAGS(fEFlags);
2391 } IEM_MC_ENDIF();
2392
2393 IEM_MC_ADVANCE_RIP_AND_FINISH();
2394 IEM_MC_END();
2395 }
2396 else
2397 {
2398 /*
2399 * Register, memory.
2400 */
2401 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2402 IEM_MC_LOCAL(uint32_t, fEFlags);
2403 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2404 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2405 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2406 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2407 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2409
2410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2411 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2412 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2413 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2414
2415 IEM_MC_PREPARE_AVX_USAGE();
2416 IEM_MC_FETCH_EFLAGS(fEFlags);
2417 IEM_MC_REF_MXCSR(pfMxcsr);
2418 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2419 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2420 pfMxcsr, pEFlags, puSrc1, puSrc2);
2421 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2422 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2423 } IEM_MC_ELSE() {
2424 IEM_MC_COMMIT_EFLAGS(fEFlags);
2425 } IEM_MC_ENDIF();
2426
2427 IEM_MC_ADVANCE_RIP_AND_FINISH();
2428 IEM_MC_END();
2429 }
2430}
2431
2432
2433/* Opcode VEX.F3.0F 0x2e - invalid */
2434/* Opcode VEX.F2.0F 0x2e - invalid */
2435
2436/**
2437 * @opcode 0x2f
2438 * @oppfx none
 * @opflmodify cf,pf,af,zf,sf,of
2440 * @opflclear af,sf,of
2441 */
/**
 * vcomiss: ordered compare of the low single-precision elements of
 * Vss and Wss, setting EFLAGS.  Same structure as vucomiss; only the
 * called worker (ordered vs. unordered compare) differs.
 */
FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
{
    IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
                                 pfMxcsr, pEFlags, puSrc1, puSrc2);
        /* Only commit the flags if the worker didn't flag a pending SIMD FP exception. */
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, fEFlags);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        /* Scalar single: only 32 bits are fetched from memory. */
        IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
                                 pfMxcsr, pEFlags, puSrc1, puSrc2);
        IEM_MC_IF_MXCSR_XCPT_PENDING() {
            IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        } IEM_MC_ELSE() {
            IEM_MC_COMMIT_EFLAGS(fEFlags);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2510
2511
2512/**
2513 * @opcode 0x2f
2514 * @oppfx 0x66
 * @opflmodify cf,pf,af,zf,sf,of
2516 * @opflclear af,sf,of
2517 */
2518FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2519{
2520 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2522 if (IEM_IS_MODRM_REG_MODE(bRm))
2523 {
2524 /*
2525 * Register, register.
2526 */
2527 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2528 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2529 IEM_MC_LOCAL(uint32_t, fEFlags);
2530 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2531 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2532 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2533 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2534 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2535 IEM_MC_PREPARE_AVX_USAGE();
2536 IEM_MC_FETCH_EFLAGS(fEFlags);
2537 IEM_MC_REF_MXCSR(pfMxcsr);
2538 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2539 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2540 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2541 pfMxcsr, pEFlags, puSrc1, puSrc2);
2542 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2543 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2544 } IEM_MC_ELSE() {
2545 IEM_MC_COMMIT_EFLAGS(fEFlags);
2546 } IEM_MC_ENDIF();
2547
2548 IEM_MC_ADVANCE_RIP_AND_FINISH();
2549 IEM_MC_END();
2550 }
2551 else
2552 {
2553 /*
2554 * Register, memory.
2555 */
2556 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2557 IEM_MC_LOCAL(uint32_t, fEFlags);
2558 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2559 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2560 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2561 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2562 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2564
2565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2566 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2567 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2568 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569
2570 IEM_MC_PREPARE_AVX_USAGE();
2571 IEM_MC_FETCH_EFLAGS(fEFlags);
2572 IEM_MC_REF_MXCSR(pfMxcsr);
2573 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2574 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2575 pfMxcsr, pEFlags, puSrc1, puSrc2);
2576 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2577 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2578 } IEM_MC_ELSE() {
2579 IEM_MC_COMMIT_EFLAGS(fEFlags);
2580 } IEM_MC_ENDIF();
2581
2582 IEM_MC_ADVANCE_RIP_AND_FINISH();
2583 IEM_MC_END();
2584 }
2585}
2586
2587
2588/* Opcode VEX.F3.0F 0x2f - invalid */
2589/* Opcode VEX.F2.0F 0x2f - invalid */
2590
2591/* Opcode VEX.0F 0x30 - invalid */
2592/* Opcode VEX.0F 0x31 - invalid */
2593/* Opcode VEX.0F 0x32 - invalid */
2594/* Opcode VEX.0F 0x33 - invalid */
2595/* Opcode VEX.0F 0x34 - invalid */
2596/* Opcode VEX.0F 0x35 - invalid */
2597/* Opcode VEX.0F 0x36 - invalid */
2598/* Opcode VEX.0F 0x37 - invalid */
2599/* Opcode VEX.0F 0x38 - invalid */
2600/* Opcode VEX.0F 0x39 - invalid */
2601/* Opcode VEX.0F 0x3a - invalid */
2602/* Opcode VEX.0F 0x3b - invalid */
2603/* Opcode VEX.0F 0x3c - invalid */
2604/* Opcode VEX.0F 0x3d - invalid */
2605/* Opcode VEX.0F 0x3e - invalid */
2606/* Opcode VEX.0F 0x3f - invalid */
2607/* Opcode VEX.0F 0x40 - invalid */
2608/* Opcode VEX.0F 0x41 - invalid */
2609/* Opcode VEX.0F 0x42 - invalid */
2610/* Opcode VEX.0F 0x43 - invalid */
2611/* Opcode VEX.0F 0x44 - invalid */
2612/* Opcode VEX.0F 0x45 - invalid */
2613/* Opcode VEX.0F 0x46 - invalid */
2614/* Opcode VEX.0F 0x47 - invalid */
2615/* Opcode VEX.0F 0x48 - invalid */
2616/* Opcode VEX.0F 0x49 - invalid */
2617/* Opcode VEX.0F 0x4a - invalid */
2618/* Opcode VEX.0F 0x4b - invalid */
2619/* Opcode VEX.0F 0x4c - invalid */
2620/* Opcode VEX.0F 0x4d - invalid */
2621/* Opcode VEX.0F 0x4e - invalid */
2622/* Opcode VEX.0F 0x4f - invalid */
2623
2624
2625/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
/**
 * vmovmskps: extract the sign bits of the packed single elements of the
 * XMM/YMM source into a general-purpose register (stored zero-extended as
 * a 32-bit value).  Register source only; a memory encoding is #UD.
 * NOTE(review): the 256-bit form is gated on fAvx2 here although
 * VMOVMSKPS.256 is architecturally AVX — confirm this is intentional.
 */
FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
{
    IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_LOCAL(uint8_t, u8Dst); /* 4-bit mask for the 128-bit form */
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
                                     pu8Dst, puSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); /* zero-extends into the 32-bit GPR */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(uint8_t, u8Dst); /* 8-bit mask for the 256-bit form */
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
                                     pu8Dst, puSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2674
2675
2676/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
/**
 * vmovmskpd: extract the sign bits of the packed double elements of the
 * XMM/YMM source into a general-purpose register (stored zero-extended as
 * a 32-bit value).  Register source only; a memory encoding is #UD.
 * Same structure as iemOp_vmovmskps_Gy_Ups, including the fAvx2 gate on
 * the 256-bit form (NOTE(review): architecturally AVX — confirm intent).
 */
FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
{
    IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_LOCAL(uint8_t, u8Dst); /* 2-bit mask for the 128-bit form */
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
                                     pu8Dst, puSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst); /* zero-extends into the 32-bit GPR */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(uint8_t, u8Dst); /* 4-bit mask for the 256-bit form */
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
                                     pu8Dst, puSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
2725
2726
2727/* Opcode VEX.F3.0F 0x50 - invalid */
2728/* Opcode VEX.F2.0F 0x50 - invalid */
2729
2730/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2731FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2732/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2733FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2734/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2735FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2736/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2737FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2738
2739/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2740FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2741/* Opcode VEX.66.0F 0x52 - invalid */
2742/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2743FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2744/* Opcode VEX.F2.0F 0x52 - invalid */
2745
2746/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2747FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2748/* Opcode VEX.66.0F 0x53 - invalid */
2749/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2750FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2751/* Opcode VEX.F2.0F 0x53 - invalid */
2752
2753
2754/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
/* vandps: bitwise AND of packed singles.  AND is type-agnostic, so this
   reuses the integer vpand worker (host variant when the host has AVX2,
   otherwise the C fallback). */
FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
}
2761
2762
2763/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
/* vandpd: bitwise AND of packed doubles; same vpand worker as vandps
   since the operation is type-agnostic. */
FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
{
    IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
}
2770
2771
2772/* Opcode VEX.F3.0F 0x54 - invalid */
2773/* Opcode VEX.F2.0F 0x54 - invalid */
2774
2775
2776/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
/* vandnps: bitwise AND-NOT of packed singles; reuses the integer vpandn
   worker since the operation is type-agnostic. */
FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
}
2783
2784
2785/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
/* vandnpd: bitwise AND-NOT of packed doubles; same vpandn worker as
   vandnps since the operation is type-agnostic. */
FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
{
    IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
}
2792
2793
2794/* Opcode VEX.F3.0F 0x55 - invalid */
2795/* Opcode VEX.F2.0F 0x55 - invalid */
2796
2797/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
/* vorps: bitwise OR of packed singles; reuses the integer vpor worker
   since the operation is type-agnostic. */
FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
}
2804
2805
2806/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
/* vorpd: bitwise OR of packed doubles; same vpor worker as vorps since
   the operation is type-agnostic. */
FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
{
    IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
}
2813
2814
2815/* Opcode VEX.F3.0F 0x56 - invalid */
2816/* Opcode VEX.F2.0F 0x56 - invalid */
2817
2818
2819/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
/* vxorps: bitwise XOR of packed singles; reuses the integer vpxor worker
   since the operation is type-agnostic. */
FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
{
    IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
}
2826
2827
2828/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
/* vxorpd: bitwise XOR of packed doubles; same vpxor worker as vxorps
   since the operation is type-agnostic. */
FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
{
    IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
}
2835
2836
2837/* Opcode VEX.F3.0F 0x57 - invalid */
2838/* Opcode VEX.F2.0F 0x57 - invalid */
2839
2840/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2841FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2842/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2843FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2844/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2845FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2846/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2847FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2848
2849/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2850FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
/*
 * NOTE(review): The VEX.0F 0x59..0x5f arithmetic/min-max/conversion
 * instructions below are still unimplemented; FNIEMOP_STUB presumably expands
 * to a placeholder that asserts or raises - confirm before relying on them.
 */

/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);

/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
/* Opcode VEX.F2.0F 0x5b - invalid */

/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2910
2911
2912/* Opcode VEX.0F 0x60 - invalid */
2913
2914
/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
{
    /* Interleave low bytes of the two sources; dispatched via the common
       low-source worker (AVX cpuid for 128-bit, AVX2 for 256-bit), using the
       host implementation when available, else the C fallback. */
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpunpcklbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
2922
2923
2924/* Opcode VEX.F3.0F 0x60 - invalid */
2925
2926
2927/* Opcode VEX.0F 0x61 - invalid */
2928
2929
/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
{
    /* Interleave low words of the two sources; dispatched via the common
       low-source AVX/AVX2 worker (host implementation when available). */
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpunpcklwd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
2937
2938
2939/* Opcode VEX.F3.0F 0x61 - invalid */
2940
2941
2942/* Opcode VEX.0F 0x62 - invalid */
2943
/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
{
    /* Interleave low dwords of the two sources; dispatched via the common
       low-source AVX/AVX2 worker (host implementation when available). */
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpunpckldq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
2951
2952
2953/* Opcode VEX.F3.0F 0x62 - invalid */
2954
2955
2956
2957/* Opcode VEX.0F 0x63 - invalid */
2958
2959
/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
{
    /* Pack signed words into signed-saturated bytes; dispatched via the
       common "_Opt" AVX/AVX2 worker (host implementation when available). */
    IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpacksswb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
2967
2968
2969/* Opcode VEX.F3.0F 0x63 - invalid */
2970
2971/* Opcode VEX.0F 0x64 - invalid */
2972
2973
2974/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2975FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2976{
2977 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2978 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2979 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2980}
2981
2982
2983/* Opcode VEX.F3.0F 0x64 - invalid */
2984
2985/* Opcode VEX.0F 0x65 - invalid */
2986
2987
2988/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2989FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2990{
2991 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2992 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
2993 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2994}
2995
2996
2997/* Opcode VEX.F3.0F 0x65 - invalid */
2998
2999/* Opcode VEX.0F 0x66 - invalid */
3000
3001
3002/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3003FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3004{
3005 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3006 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
3007 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3008}
3009
3010
3011/* Opcode VEX.F3.0F 0x66 - invalid */
3012
3013/* Opcode VEX.0F 0x67 - invalid */
3014
3015
/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
{
    /* Pack signed words into unsigned-saturated bytes; dispatched via the
       common "_Opt" AVX/AVX2 worker (host implementation when available).
       Note: function name is missing the trailing 'x' of "Wx"; kept as-is
       since the opcode tables reference it by this name. */
    IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpackuswb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3023
3024
3025/* Opcode VEX.F3.0F 0x67 - invalid */
3026
3027
3028///**
3029// * Common worker for SSE2 instructions on the form:
3030// * pxxxx xmm1, xmm2/mem128
3031// *
3032// * The 2nd operand is the second half of a register, which in the memory case
3033// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3034// * where it may read the full 128 bits or only the upper 64 bits.
3035// *
3036// * Exceptions type 4.
3037// */
3038//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3039//{
3040// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3041// if (IEM_IS_MODRM_REG_MODE(bRm))
3042// {
3043// /*
3044// * Register, register.
3045// */
3046// IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3047// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3048// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3049// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3050// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3051// IEM_MC_PREPARE_SSE_USAGE();
3052// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3053// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3054// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3055// IEM_MC_ADVANCE_RIP_AND_FINISH();
3056// IEM_MC_END();
3057// }
3058// else
3059// {
3060// /*
3061// * Register, memory.
3062// */
3063// IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3064// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3065// IEM_MC_LOCAL(RTUINT128U, uSrc);
3066// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3067// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3068//
3069// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3070// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3071// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3072// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
3073//
3074// IEM_MC_PREPARE_SSE_USAGE();
3075// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3076// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3077//
3078// IEM_MC_ADVANCE_RIP_AND_FINISH();
3079// IEM_MC_END();
3080// }
3081// return VINF_SUCCESS;
3082//}
3083
3084
3085/* Opcode VEX.0F 0x68 - invalid */
3086
/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
{
    /* Interleave high bytes of the two sources; dispatched via the common
       high-source AVX/AVX2 worker (host implementation when available). */
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpunpckhbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3094
3095
3096/* Opcode VEX.F3.0F 0x68 - invalid */
3097
3098
3099/* Opcode VEX.0F 0x69 - invalid */
3100
3101
/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
{
    /* Interleave high words of the two sources; dispatched via the common
       high-source AVX/AVX2 worker (host implementation when available). */
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpunpckhwd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3109
3110
3111/* Opcode VEX.F3.0F 0x69 - invalid */
3112
3113
3114/* Opcode VEX.0F 0x6a - invalid */
3115
3116
/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
{
    /* Interleave high dwords of the two sources; dispatched via the common
       high-source AVX/AVX2 worker (host implementation when available). */
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpunpckhdq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3124
3125
3126/* Opcode VEX.F3.0F 0x6a - invalid */
3127
3128
3129/* Opcode VEX.0F 0x6b - invalid */
3130
3131
/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
{
    /* Pack signed dwords into signed-saturated words; dispatched via the
       common "_Opt" AVX/AVX2 worker (host implementation when available). */
    IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpackssdw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3139
3140
3141/* Opcode VEX.F3.0F 0x6b - invalid */
3142
3143
3144/* Opcode VEX.0F 0x6c - invalid */
3145
3146
/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
{
    /* Interleave low qwords of the two sources; dispatched via the common
       low-source AVX/AVX2 worker (host implementation when available). */
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpunpcklqdq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3154
3155
3156/* Opcode VEX.F3.0F 0x6c - invalid */
3157/* Opcode VEX.F2.0F 0x6c - invalid */
3158
3159
3160/* Opcode VEX.0F 0x6d - invalid */
3161
3162
/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
{
    /* Interleave high qwords of the two sources; dispatched via the common
       high-source AVX/AVX2 worker (host implementation when available). */
    IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(  vpunpckhqdq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3170
3171
3172/* Opcode VEX.F3.0F 0x6d - invalid */
3173
3174
3175/* Opcode VEX.0F 0x6e - invalid */
3176
FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
{
    /* VMOVD/VMOVQ (VEX.66.0F 0x6e): load a 32-bit (REX.W=0) or 64-bit
       (REX.W=1) GPR/memory operand into the low part of an XMM register,
       zero-extending up to VLMAX.  VEX.L must be 0 and vvvv unused. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x6e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     avx
         * @opgroup     og_avx_simdint_datamov
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg64 */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            /* Copy the general register qword into the YMM register,
               zeroing the remainder of the vector. */
            IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem64] */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            /* Effective address must be calculated before decoding is done. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x6e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     avx
         * @opgroup     og_avx_simdint_datamov
         * @opxcpttype  5
         * @opfunction  iemOp_vmovd_q_Vy_Ey
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* XMM, greg32 */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            /* Copy the general register dword into the YMM register,
               zeroing the remainder of the vector. */
            IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* XMM, [mem32] */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            /* Effective address must be calculated before decoding is done. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3279
3280
3281/* Opcode VEX.F3.0F 0x6e - invalid */
3282
3283
3284/* Opcode VEX.0F 0x6f - invalid */
3285
/**
 * @opcode      0x6f
 * @oppfx       0x66
 * @opcpuid     avx
 * @opgroup     og_avx_simdint_datamove
 * @opxcpttype  1
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
{
    /* VMOVDQA: aligned 128/256-bit integer move into Vx, zero-extending to
       VLMAX.  Memory fetches use the alignment-checking variants
       (_ALIGN_SSE / _ALIGN_AVX), unlike the vmovdqu handler below. */
    IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* VEX.L selects 128-bit vs 256-bit copy; both zero-extend to VLMAX. */
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * Register, memory128.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory256.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, u256Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3360
/**
 * @opcode      0x6f
 * @oppfx       0xf3
 * @opcpuid     avx
 * @opgroup     og_avx_simdint_datamove
 * @opxcpttype  4UA
 * @optest      op1=1 op2=2   -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
{
    /* VMOVDQU: unaligned 128/256-bit integer move into Vx, zero-extending to
       VLMAX.  Identical to vmovdqa above except the memory fetches use the
       non-alignment-checking IEM_MC_FETCH_MEM_U128/U256 variants. */
    IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        /* VEX.L selects 128-bit vs 256-bit copy; both zero-extend to VLMAX. */
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * Register, memory128.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory256.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, u256Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3435
3436
3437/* Opcode VEX.0F 0x70 - invalid */
3438
3439
/**
 * Common worker for AVX/AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm2/mem128, imm8
 *     - vpxxx    ymm0, ymm2/mem256, imm8
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 *
 * @param   pfnU128     The 128-bit shuffle implementation (destination
 *                      zero-extended to VLMAX afterwards).
 * @param   pfnU256     The 256-bit shuffle implementation.
 */
FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            /* 256-bit: work on local copies, store back with zero-extension. */
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U,            uDst);
            IEM_MC_LOCAL(RTUINT256U,            uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U,   puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U,  puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t,           bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(    IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* 128-bit: operate on the XMM registers directly, then clear the
               upper YMM half of the destination. */
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U,             puDst, 0);
            IEM_MC_ARG(PCRTUINT128U,            puSrc, 1);
            IEM_MC_ARG_CONST(uint8_t,           bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst,         IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc,   IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(           IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U,            uDst);
            IEM_MC_LOCAL(RTUINT256U,            uSrc);
            IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U,   puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U,  puSrc, uSrc, 1);

            /* One immediate byte follows the ModR/M operand. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_ARG_CONST(uint8_t,           bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(    IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U,            uSrc);
            IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U,             puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U,  puSrc, uSrc, 1);

            /* One immediate byte follows the ModR/M operand. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t,           bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst,         IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(           IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
3543
3544
3545/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3546FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3547{
3548 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3549 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3550 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3551
3552}
3553
3554
3555/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3556FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3557{
3558 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3559 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3560 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3561
3562}
3563
3564
3565/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3566FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3567{
3568 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3569 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3570 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3571}
3572
3573
/**
 * Common worker(s) for AVX/AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm2, imm8
 *     - vpxxx    ymm0, ymm2, imm8
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 *
 * Register-only encoding (VEX "VMI" group shifts); a memory operand is
 * rejected with \#UD.  The destination is the VEX.vvvv register.
 *
 * @param   bRm         The ModR/M byte (already fetched by the caller).
 * @param   pfnU128     The 128-bit implementation to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_ARG(PRTUINT128U,             puDst, 0);
        IEM_MC_ARG(PCRTUINT128U,            puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,           bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        /* Destination is vvvv, source is ModR/M r/m; clear upper YMM half. */
        IEM_MC_REF_XREG_U128(puDst,         IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_REF_XREG_U128_CONST(puSrc,   IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
        IEM_MC_CLEAR_YREG_128_UP(           IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3607
/**
 * 256-bit sibling of iemOpCommonAvxAvx2_Hx_Ux_Ib_u128: register-only
 * vpxxx ymm0, ymm2, imm8 worker.  Requires AVX2; a memory operand is
 * rejected with \#UD.  The destination is the VEX.vvvv register.
 *
 * @param   bRm         The ModR/M byte (already fetched by the caller).
 * @param   pfnU256     The 256-bit implementation to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U,            uDst);
        IEM_MC_LOCAL(RTUINT256U,            uSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U,   puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U,  puSrc, uSrc, 1);
        IEM_MC_ARG_CONST(uint8_t,           bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        /* Work on local copies; store result to vvvv with zero-extension. */
        IEM_MC_FETCH_YREG_U256(uSrc,        IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(    IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
3635
3636
/* Opcode VEX.0F 0x71 11/2 - invalid. */
/** Opcode VEX.66.0F 0x71 11/2 - vpsrlw Hx, Ux, Ib (unimplemented stub). */
FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x71 11/4 - invalid */
/** Opcode VEX.66.0F 0x71 11/4 - vpsraw Hx, Ux, Ib (unimplemented stub). */
FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x71 11/6 - invalid */
3646
3647/** Opcode VEX.66.0F 0x71 11/6. */
3648FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
3649{
3650 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3651 if (pVCpu->iem.s.uVexLength)
3652 {
3653 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3654 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
3655 }
3656 else
3657 {
3658 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3659 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
3660 }
3661}
3662
3663
/**
 * VEX Group 12 jump table for register variant.
 *
 * Four entries per /r value, indexed by pVCpu->iem.s.idxPrefix - presumably
 * the none/66/F3/F2 mandatory-prefix index (column 1 holds the VEX.66.0F
 * handlers, matching the opcode comments above); confirm against the
 * idxPrefix definition.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3679
3680
3681/** Opcode VEX.0F 0x71. */
3682FNIEMOP_DEF(iemOp_VGrp12)
3683{
3684 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3685 if (IEM_IS_MODRM_REG_MODE(bRm))
3686 /* register, register */
3687 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3688 + pVCpu->iem.s.idxPrefix], bRm);
3689 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3690}
3691
3692
/* Opcode VEX.0F 0x72 11/2 - invalid. */
/** Opcode VEX.66.0F 0x72 11/2 - vpsrld Hx, Ux, Ib (unimplemented stub). */
FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x72 11/4 - invalid. */
/** Opcode VEX.66.0F 0x72 11/4 - vpsrad Hx, Ux, Ib (unimplemented stub). */
FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x72 11/6 - invalid. */
3702
3703/** Opcode VEX.66.0F 0x72 11/6. */
3704FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
3705{
3706 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3707 if (pVCpu->iem.s.uVexLength)
3708 {
3709 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3710 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
3711 }
3712 else
3713 {
3714 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3715 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
3716 }
3717}
3718
3719
/**
 * Group 13 jump table for register variant.
 *
 * Four entries per /r value, indexed by pVCpu->iem.s.idxPrefix - presumably
 * the none/66/F3/F2 mandatory-prefix index (column 1 holds the VEX.66.0F
 * handlers); confirm against the idxPrefix definition.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3735
3736/** Opcode VEX.0F 0x72. */
3737FNIEMOP_DEF(iemOp_VGrp13)
3738{
3739 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3740 if (IEM_IS_MODRM_REG_MODE(bRm))
3741 /* register, register */
3742 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3743 + pVCpu->iem.s.idxPrefix], bRm);
3744 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3745}
3746
3747
/* Opcode VEX.0F 0x73 11/2 - invalid. */
/** Opcode VEX.66.0F 0x73 11/2 - vpsrlq Hx, Ux, Ib (unimplemented stub). */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode VEX.66.0F 0x73 11/3 - vpsrldq Hx, Ux, Ib (unimplemented stub). */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3754
3755/* Opcode VEX.0F 0x73 11/6 - invalid. */
3756
3757/** Opcode VEX.66.0F 0x73 11/6. */
3758FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
3759{
3760 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3761 if (pVCpu->iem.s.uVexLength)
3762 {
3763 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3764 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
3765 }
3766 else
3767 {
3768 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3769 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
3770 }
3771}
3772
/** Opcode VEX.66.0F 0x73 11/7 - vpslldq Hx, Ux, Ib (unimplemented stub). */
FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3775
/**
 * Group 14 jump table for register variant.
 *
 * Four entries per /r value, indexed by pVCpu->iem.s.idxPrefix - presumably
 * the none/66/F3/F2 mandatory-prefix index (column 1 holds the VEX.66.0F
 * handlers); confirm against the idxPrefix definition.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3791
3792
3793/** Opcode VEX.0F 0x73. */
3794FNIEMOP_DEF(iemOp_VGrp14)
3795{
3796 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3797 if (IEM_IS_MODRM_REG_MODE(bRm))
3798 /* register, register */
3799 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3800 + pVCpu->iem.s.idxPrefix], bRm);
3801 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3802}
3803
3804
3805/* Opcode VEX.0F 0x74 - invalid */
3806
3807
/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
    /* Defer to the common RVM worker; AVX cpuid gates 128-bit, AVX2 gates 256-bit. */
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3815
3816/* Opcode VEX.F3.0F 0x74 - invalid */
3817/* Opcode VEX.F2.0F 0x74 - invalid */
3818
3819
3820/* Opcode VEX.0F 0x75 - invalid */
3821
3822
/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
    /* Defer to the common RVM worker; AVX cpuid gates 128-bit, AVX2 gates 256-bit. */
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3830
3831
3832/* Opcode VEX.F3.0F 0x75 - invalid */
3833/* Opcode VEX.F2.0F 0x75 - invalid */
3834
3835
3836/* Opcode VEX.0F 0x76 - invalid */
3837
3838
/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
    /* Defer to the common RVM worker; AVX cpuid gates 128-bit, AVX2 gates 256-bit. */
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
3846
3847
3848/* Opcode VEX.F3.0F 0x76 - invalid */
3849/* Opcode VEX.F2.0F 0x76 - invalid */
3850
3851
/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
{
    /* VEX.L selects the form: L=0 is vzeroupper, L=1 is vzeroall. */
    Assert(pVCpu->iem.s.uVexLength <= 1);
    if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: vzeroupper
         *
         * Clears bits 255:128 of the YMM registers, leaving the low XMM
         * halves untouched.
         */
        IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);

        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_CLEAR_YREG_128_UP(0);
        IEM_MC_CLEAR_YREG_128_UP(1);
        IEM_MC_CLEAR_YREG_128_UP(2);
        IEM_MC_CLEAR_YREG_128_UP(3);
        IEM_MC_CLEAR_YREG_128_UP(4);
        IEM_MC_CLEAR_YREG_128_UP(5);
        IEM_MC_CLEAR_YREG_128_UP(6);
        IEM_MC_CLEAR_YREG_128_UP(7);

        /* Registers 8..15 only exist in 64-bit mode. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_CLEAR_YREG_128_UP( 8);
            IEM_MC_CLEAR_YREG_128_UP( 9);
            IEM_MC_CLEAR_YREG_128_UP(10);
            IEM_MC_CLEAR_YREG_128_UP(11);
            IEM_MC_CLEAR_YREG_128_UP(12);
            IEM_MC_CLEAR_YREG_128_UP(13);
            IEM_MC_CLEAR_YREG_128_UP(14);
            IEM_MC_CLEAR_YREG_128_UP(15);
        }

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: vzeroall
         *
         * Zeroes each register completely: storing a zero dword with VLMAX
         * zero-extension wipes the full register width.
         */
        IEMOP_MNEMONIC(vzeroall, "vzeroall");
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);

        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);

        /* Registers 8..15 only exist in 64-bit mode. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
            IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
            IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
        }

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
3930
3931
3932/* Opcode VEX.66.0F 0x77 - invalid */
3933/* Opcode VEX.F3.0F 0x77 - invalid */
3934/* Opcode VEX.F2.0F 0x77 - invalid */
3935
3936/* Opcode VEX.0F 0x78 - invalid */
3937/* Opcode VEX.66.0F 0x78 - invalid */
3938/* Opcode VEX.F3.0F 0x78 - invalid */
3939/* Opcode VEX.F2.0F 0x78 - invalid */
3940
3941/* Opcode VEX.0F 0x79 - invalid */
3942/* Opcode VEX.66.0F 0x79 - invalid */
3943/* Opcode VEX.F3.0F 0x79 - invalid */
3944/* Opcode VEX.F2.0F 0x79 - invalid */
3945
3946/* Opcode VEX.0F 0x7a - invalid */
3947/* Opcode VEX.66.0F 0x7a - invalid */
3948/* Opcode VEX.F3.0F 0x7a - invalid */
3949/* Opcode VEX.F2.0F 0x7a - invalid */
3950
3951/* Opcode VEX.0F 0x7b - invalid */
3952/* Opcode VEX.66.0F 0x7b - invalid */
3953/* Opcode VEX.F3.0F 0x7b - invalid */
3954/* Opcode VEX.F2.0F 0x7b - invalid */
3955
/* Opcode VEX.0F 0x7c - invalid */
/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x7c - invalid */
/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode VEX.0F 0x7d - invalid */
/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x7d - invalid */
/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3969
3970
3971/* Opcode VEX.0F 0x7e - invalid */
3972
/** Opcode VEX.66.0F 0x7e - vmovd/vmovq Ey, Vy; VEX.W=1 selects the 64-bit vmovq form. */
FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode 0x7e
         * @opcodesub rex.w=1
         * @oppfx 0x66
         * @opcpuid avx
         * @opgroup og_avx_simdint_datamov
         * @opxcpttype 5
         * @optest 64-bit / op1=1 op2=2 -> op1=2
         * @optest 64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM: copy the low qword of the source register to a GPR. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM: store the low qword of the source register to memory. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x7e
         * @opcodesub rex.w=0
         * @oppfx 0x66
         * @opcpuid avx
         * @opgroup og_avx_simdint_datamov
         * @opxcpttype 5
         * @opfunction iemOp_vmovd_q_Vy_Ey
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM: copy the low dword of the source register to a GPR. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM: store the low dword of the source register to memory. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
4075
4076
4077/**
4078 * @opcode 0x7e
4079 * @oppfx 0xf3
4080 * @opcpuid avx
4081 * @opgroup og_avx_pcksclr_datamove
4082 * @opxcpttype none
4083 * @optest op1=1 op2=2 -> op1=2
4084 * @optest op1=0 op2=-42 -> op1=-42
4085 */
FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         * Copies the low qword and zero-extends to the full vector width.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                      IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         * Loads a qword from memory, zero-extending to the full vector width.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

}
4128/* Opcode VEX.F2.0F 0x7e - invalid */
4129
4130
4131/* Opcode VEX.0F 0x7f - invalid */
4132
4133/**
4134 * @opcode 0x7f
4135 * @oppfx 0x66
4136 * @opcpuid avx
4137 * @opgroup og_avx_simdint_datamove
4138 * @opxcpttype 1
4139 * @optest op1=1 op2=2 -> op1=2
4140 * @optest op1=0 op2=-42 -> op1=-42
4141 */
4142FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4143{
4144 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4145 Assert(pVCpu->iem.s.uVexLength <= 1);
4146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4147 if (IEM_IS_MODRM_REG_MODE(bRm))
4148 {
4149 /*
4150 * Register, register.
4151 */
4152 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4153 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4154
4155 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4156 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4157 if (pVCpu->iem.s.uVexLength == 0)
4158 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4159 IEM_GET_MODRM_REG(pVCpu, bRm));
4160 else
4161 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4162 IEM_GET_MODRM_REG(pVCpu, bRm));
4163 IEM_MC_ADVANCE_RIP_AND_FINISH();
4164 IEM_MC_END();
4165 }
4166 else if (pVCpu->iem.s.uVexLength == 0)
4167 {
4168 /*
4169 * Register, memory128.
4170 */
4171 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4172 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4174
4175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4176 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4177 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4178 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4179
4180 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4181 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4182
4183 IEM_MC_ADVANCE_RIP_AND_FINISH();
4184 IEM_MC_END();
4185 }
4186 else
4187 {
4188 /*
4189 * Register, memory256.
4190 */
4191 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4192 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4194
4195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4196 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4197 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4198 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4199
4200 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4201 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4202
4203 IEM_MC_ADVANCE_RIP_AND_FINISH();
4204 IEM_MC_END();
4205 }
4206}
4207
4208
4209/**
4210 * @opcode 0x7f
4211 * @oppfx 0xf3
4212 * @opcpuid avx
4213 * @opgroup og_avx_simdint_datamove
4214 * @opxcpttype 4UA
4215 * @optest op1=1 op2=2 -> op1=2
4216 * @optest op1=0 op2=-42 -> op1=-42
4217 */
4218FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4219{
4220 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4221 Assert(pVCpu->iem.s.uVexLength <= 1);
4222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4223 if (IEM_IS_MODRM_REG_MODE(bRm))
4224 {
4225 /*
4226 * Register, register.
4227 */
4228 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4229 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4230
4231 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4232 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4233 if (pVCpu->iem.s.uVexLength == 0)
4234 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4235 IEM_GET_MODRM_REG(pVCpu, bRm));
4236 else
4237 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4238 IEM_GET_MODRM_REG(pVCpu, bRm));
4239 IEM_MC_ADVANCE_RIP_AND_FINISH();
4240 IEM_MC_END();
4241 }
4242 else if (pVCpu->iem.s.uVexLength == 0)
4243 {
4244 /*
4245 * Register, memory128.
4246 */
4247 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4248 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4250
4251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4252 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4253 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4254 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4255
4256 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4257 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4258
4259 IEM_MC_ADVANCE_RIP_AND_FINISH();
4260 IEM_MC_END();
4261 }
4262 else
4263 {
4264 /*
4265 * Register, memory256.
4266 */
4267 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4268 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4270
4271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4272 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4273 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4274 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4275
4276 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4277 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4278
4279 IEM_MC_ADVANCE_RIP_AND_FINISH();
4280 IEM_MC_END();
4281 }
4282}
4283
4284/* Opcode VEX.F2.0F 0x7f - invalid */
4285
4286
4287/* Opcode VEX.0F 0x80 - invalid */
4288/* Opcode VEX.0F 0x81 - invalid */
4289/* Opcode VEX.0F 0x82 - invalid */
4290/* Opcode VEX.0F 0x83 - invalid */
4291/* Opcode VEX.0F 0x84 - invalid */
4292/* Opcode VEX.0F 0x85 - invalid */
4293/* Opcode VEX.0F 0x86 - invalid */
4294/* Opcode VEX.0F 0x87 - invalid */
4295/* Opcode VEX.0F 0x88 - invalid */
4296/* Opcode VEX.0F 0x89 - invalid */
4297/* Opcode VEX.0F 0x8a - invalid */
4298/* Opcode VEX.0F 0x8b - invalid */
4299/* Opcode VEX.0F 0x8c - invalid */
4300/* Opcode VEX.0F 0x8d - invalid */
4301/* Opcode VEX.0F 0x8e - invalid */
4302/* Opcode VEX.0F 0x8f - invalid */
4303/* Opcode VEX.0F 0x90 - invalid */
4304/* Opcode VEX.0F 0x91 - invalid */
4305/* Opcode VEX.0F 0x92 - invalid */
4306/* Opcode VEX.0F 0x93 - invalid */
4307/* Opcode VEX.0F 0x94 - invalid */
4308/* Opcode VEX.0F 0x95 - invalid */
4309/* Opcode VEX.0F 0x96 - invalid */
4310/* Opcode VEX.0F 0x97 - invalid */
4311/* Opcode VEX.0F 0x98 - invalid */
4312/* Opcode VEX.0F 0x99 - invalid */
4313/* Opcode VEX.0F 0x9a - invalid */
4314/* Opcode VEX.0F 0x9b - invalid */
4315/* Opcode VEX.0F 0x9c - invalid */
4316/* Opcode VEX.0F 0x9d - invalid */
4317/* Opcode VEX.0F 0x9e - invalid */
4318/* Opcode VEX.0F 0x9f - invalid */
4319/* Opcode VEX.0F 0xa0 - invalid */
4320/* Opcode VEX.0F 0xa1 - invalid */
4321/* Opcode VEX.0F 0xa2 - invalid */
4322/* Opcode VEX.0F 0xa3 - invalid */
4323/* Opcode VEX.0F 0xa4 - invalid */
4324/* Opcode VEX.0F 0xa5 - invalid */
4325/* Opcode VEX.0F 0xa6 - invalid */
4326/* Opcode VEX.0F 0xa7 - invalid */
4327/* Opcode VEX.0F 0xa8 - invalid */
4328/* Opcode VEX.0F 0xa9 - invalid */
4329/* Opcode VEX.0F 0xaa - invalid */
4330/* Opcode VEX.0F 0xab - invalid */
4331/* Opcode VEX.0F 0xac - invalid */
4332/* Opcode VEX.0F 0xad - invalid */
4333
4334
4335/* Opcode VEX.0F 0xae mem/0 - invalid. */
4336/* Opcode VEX.0F 0xae mem/1 - invalid. */
4337
4338/**
4339 * @ opmaps grp15
4340 * @ opcode !11/2
4341 * @ oppfx none
4342 * @ opcpuid sse
4343 * @ opgroup og_sse_mxcsrsm
4344 * @ opxcpttype 5
4345 * @ optest op1=0 -> mxcsr=0
4346 * @ optest op1=0x2083 -> mxcsr=0x2083
4347 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4348 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4349 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4350 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4351 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4352 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4353 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4354 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4355 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4356 */
/* vldmxcsr Md - currently a stub; a draft IEM_MC implementation is kept below for reference. */
FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
//{
// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
// IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
// IEM_MC_END();
// return VINF_SUCCESS;
//}
4371
4372
4373/**
4374 * @opmaps vexgrp15
4375 * @opcode !11/3
4376 * @oppfx none
4377 * @opcpuid avx
4378 * @opgroup og_avx_mxcsrsm
4379 * @opxcpttype 5
4380 * @optest mxcsr=0 -> op1=0
4381 * @optest mxcsr=0x2083 -> op1=0x2083
4382 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4383 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4384 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4385 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4386 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4387 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4388 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4389 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4390 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4391 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4392 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4393 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4394 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4395 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4396 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4397 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4398 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4399 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4400 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4401 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4402 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4403 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4404 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4405 * -> value.xcpt=0x6
4406 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4407 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4408 * APMv4 rev 3.17 page 509.
4409 * @todo Test this instruction on AMD Ryzen.
4410 */
FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
    IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    /* The heavy lifting (writing MXCSR to [iEffSeg:GCPtrEff]) is done in the C implementation. */
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
}
4423
4424/* Opcode VEX.0F 0xae mem/4 - invalid. */
4425/* Opcode VEX.0F 0xae mem/5 - invalid. */
4426/* Opcode VEX.0F 0xae mem/6 - invalid. */
4427/* Opcode VEX.0F 0xae mem/7 - invalid. */
4428
4429/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4430/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4431/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4432/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4433/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4434/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4435/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4436/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4437
4438/**
4439 * Vex group 15 jump table for memory variant.
4440 */
4441IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4442{ /* pfx: none, 066h, 0f3h, 0f2h */
4443 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4444 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4445 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4446 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4447 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4448 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4449 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4450 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4451};
4452AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4453
4454
4455/** Opcode vex. 0xae. */
4456FNIEMOP_DEF(iemOp_VGrp15)
4457{
4458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4459 if (IEM_IS_MODRM_REG_MODE(bRm))
4460 /* register, register */
4461 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4462
4463 /* memory, register */
4464 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4465 + pVCpu->iem.s.idxPrefix], bRm);
4466}
4467
4468
4469/* Opcode VEX.0F 0xaf - invalid. */
4470
4471/* Opcode VEX.0F 0xb0 - invalid. */
4472/* Opcode VEX.0F 0xb1 - invalid. */
/* Opcode VEX.0F 0xb2 - invalid. */
4475/* Opcode VEX.0F 0xb3 - invalid. */
4476/* Opcode VEX.0F 0xb4 - invalid. */
4477/* Opcode VEX.0F 0xb5 - invalid. */
4478/* Opcode VEX.0F 0xb6 - invalid. */
4479/* Opcode VEX.0F 0xb7 - invalid. */
4480/* Opcode VEX.0F 0xb8 - invalid. */
4481/* Opcode VEX.0F 0xb9 - invalid. */
4482/* Opcode VEX.0F 0xba - invalid. */
4483/* Opcode VEX.0F 0xbb - invalid. */
4484/* Opcode VEX.0F 0xbc - invalid. */
4485/* Opcode VEX.0F 0xbd - invalid. */
4486/* Opcode VEX.0F 0xbe - invalid. */
4487/* Opcode VEX.0F 0xbf - invalid. */
4488
4489/* Opcode VEX.0F 0xc0 - invalid. */
4490/* Opcode VEX.66.0F 0xc0 - invalid. */
4491/* Opcode VEX.F3.0F 0xc0 - invalid. */
4492/* Opcode VEX.F2.0F 0xc0 - invalid. */
4493
4494/* Opcode VEX.0F 0xc1 - invalid. */
4495/* Opcode VEX.66.0F 0xc1 - invalid. */
4496/* Opcode VEX.F3.0F 0xc1 - invalid. */
4497/* Opcode VEX.F2.0F 0xc1 - invalid. */
4498
/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib (stub, not yet implemented) */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4507
4508/* Opcode VEX.0F 0xc3 - invalid */
4509/* Opcode VEX.66.0F 0xc3 - invalid */
4510/* Opcode VEX.F3.0F 0xc3 - invalid */
4511/* Opcode VEX.F2.0F 0xc3 - invalid */
4512
4513/* Opcode VEX.0F 0xc4 - invalid */
4514
4515
/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
{
    /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register: word source comes from the low 16 bits of a GPR.
         */
        IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG(uint16_t, u16Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
                                 puDst, puSrc, u16Src, bImmArg);
        /* Result is 128 bits wide; clear bits 255:128 of the destination. */
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory: word source is fetched from guest memory.
         */
        IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG(uint16_t, u16Src, 2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
                                 puDst, puSrc, u16Src, bImmArg);
        /* Result is 128 bits wide; clear bits 255:128 of the destination. */
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4573
4574
4575/* Opcode VEX.F3.0F 0xc4 - invalid */
4576/* Opcode VEX.F2.0F 0xc4 - invalid */
4577
/* Opcode VEX.0F 0xc5 - invalid */
4579
4580
/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_LOCAL(uint16_t, u16Dst);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
                                 pu16Dst, puSrc, bImmArg);
        /* The extracted word is zero-extended into the 32-bit destination GPR. */
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* No memory operand form for this encoding - raise #UD. */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
4611
4612
4613/* Opcode VEX.F3.0F 0xc5 - invalid */
4614/* Opcode VEX.F2.0F 0xc5 - invalid */
4615
4616
/**
 * Common worker macro for vshufps/vshufpd: decodes the register and memory
 * source forms for both 128-bit (VEX.L=0, AVX) and 256-bit (VEX.L=1, AVX2)
 * operand widths and calls the iemAImpl_<a_Instr>_u128/u256 helpers.
 */
#define VSHUFP_X(a_Instr) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
            IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_LOCAL(RTUINT256U, uSrc1); \
            IEM_MC_LOCAL(RTUINT256U, uSrc2); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
            IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_LOCAL(RTUINT256U, uSrc1); \
            IEM_MC_LOCAL(RTUINT256U, uSrc2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT128U, uSrc2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4721
4722/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4723FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4724{
4725 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4726 VSHUFP_X(vshufps);
4727}
4728
4729
/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
{
    IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
    /* Decode and emulate via the VSHUFP_X worker macro shared with vshufps. */
    VSHUFP_X(vshufpd);
}
4736#undef VSHUFP_X
4737
4738
4739/* Opcode VEX.F3.0F 0xc6 - invalid */
4740/* Opcode VEX.F2.0F 0xc6 - invalid */
4741
4742/* Opcode VEX.0F 0xc7 - invalid */
4743/* Opcode VEX.66.0F 0xc7 - invalid */
4744/* Opcode VEX.F3.0F 0xc7 - invalid */
4745/* Opcode VEX.F2.0F 0xc7 - invalid */
4746
4747/* Opcode VEX.0F 0xc8 - invalid */
4748/* Opcode VEX.0F 0xc9 - invalid */
4749/* Opcode VEX.0F 0xca - invalid */
4750/* Opcode VEX.0F 0xcb - invalid */
4751/* Opcode VEX.0F 0xcc - invalid */
4752/* Opcode VEX.0F 0xcd - invalid */
4753/* Opcode VEX.0F 0xce - invalid */
4754/* Opcode VEX.0F 0xcf - invalid */
4755
4756
/* Note: the following opcodes are declared via FNIEMOP_STUB, i.e. not implemented yet. */
/* Opcode VEX.0F 0xd0 - invalid */
/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0xd0 - invalid */
/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);

/* Opcode VEX.0F 0xd1 - invalid */
/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xd1 - invalid */
/* Opcode VEX.F2.0F 0xd1 - invalid */

/* Opcode VEX.0F 0xd2 - invalid */
/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xd2 - invalid */
/* Opcode VEX.F2.0F 0xd2 - invalid */

/* Opcode VEX.0F 0xd3 - invalid */
/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
4779/* Opcode VEX.F3.0F 0xd3 - invalid */
4780/* Opcode VEX.F2.0F 0xd3 - invalid */
4781
4782/* Opcode VEX.0F 0xd4 - invalid */
4783
4784
/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS( vpaddq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
4792
4793
4794/* Opcode VEX.F3.0F 0xd4 - invalid */
4795/* Opcode VEX.F2.0F 0xd4 - invalid */
4796
4797/* Opcode VEX.0F 0xd5 - invalid */
4798
4799
/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
4807
4808
4809/* Opcode VEX.F3.0F 0xd5 - invalid */
4810/* Opcode VEX.F2.0F 0xd5 - invalid */
4811
4812/* Opcode VEX.0F 0xd6 - invalid */
4813
4814/**
4815 * @opcode 0xd6
4816 * @oppfx 0x66
4817 * @opcpuid avx
4818 * @opgroup og_avx_pcksclr_datamove
4819 * @opxcpttype none
4820 * @optest op1=-1 op2=2 -> op1=2
4821 * @optest op1=0 op2=-42 -> op1=-42
4822 */
FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
    /* Stores the low quadword of an XMM register to a qword register or
       memory destination; VEX.L must be zero (enforced by the L0 decoding
       helpers below). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         * Copies the low 64 bits; the _ZX_VLMAX variant zeroes the
         * destination register's upper bits.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                      IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         * Note: effective address is calculated before the decoding is
         * completed, matching the other MR-form moves in this file.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4864
4865/* Opcode VEX.F3.0F 0xd6 - invalid */
4866/* Opcode VEX.F2.0F 0xd6 - invalid */
4867
4868
4869/* Opcode VEX.0F 0xd7 - invalid */
4870
/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Docs says register only. */
    if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
    {
        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
        /* NOTE(review): DISOPTYPE_X86_SSE on a VEX-encoded instruction looks
           inconsistent with the DISOPTYPE_X86_AVX used by siblings — confirm. */
        IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
        if (pVCpu->iem.s.uVexLength)
        {
            /* 256-bit source: requires AVX2; uses the vpmovmskb_u256 worker. */
            IEM_MC_BEGIN(2, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_ARG(uint64_t *,              puDst, 0);
            IEM_MC_LOCAL(RTUINT256U,            uSrc);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U,  puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
                                                                 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* 128-bit source: AVX is sufficient; reuses the SSE pmovmskb worker. */
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG(uint64_t *,              puDst, 0);
            IEM_MC_ARG(PCRTUINT128U,            puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
4914
4915
4916/* Opcode VEX.F3.0F 0xd7 - invalid */
4917/* Opcode VEX.F2.0F 0xd7 - invalid */
4918
4919
4920/* Opcode VEX.0F 0xd8 - invalid */
4921
/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
4929
4930
4931/* Opcode VEX.F3.0F 0xd8 - invalid */
4932/* Opcode VEX.F2.0F 0xd8 - invalid */
4933
4934/* Opcode VEX.0F 0xd9 - invalid */
4935
4936
/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
4944
4945
4946/* Opcode VEX.F3.0F 0xd9 - invalid */
4947/* Opcode VEX.F2.0F 0xd9 - invalid */
4948
4949/* Opcode VEX.0F 0xda - invalid */
4950
4951
/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS(vpminub);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
4959
4960
4961/* Opcode VEX.F3.0F 0xda - invalid */
4962/* Opcode VEX.F2.0F 0xda - invalid */
4963
4964/* Opcode VEX.0F 0xdb - invalid */
4965
4966
/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Dispatches to the common AVX/AVX2 worker using the prebuilt vpand implementation tables. */
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
}
4974
4975
4976/* Opcode VEX.F3.0F 0xdb - invalid */
4977/* Opcode VEX.F2.0F 0xdb - invalid */
4978
4979/* Opcode VEX.0F 0xdc - invalid */
4980
4981
/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
4989
4990
4991/* Opcode VEX.F3.0F 0xdc - invalid */
4992/* Opcode VEX.F2.0F 0xdc - invalid */
4993
4994/* Opcode VEX.0F 0xdd - invalid */
4995
4996
/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5004
5005
5006/* Opcode VEX.F3.0F 0xdd - invalid */
5007/* Opcode VEX.F2.0F 0xdd - invalid */
5008
5009/* Opcode VEX.0F 0xde - invalid */
5010
5011
/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS(vpmaxub);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5019
5020
5021/* Opcode VEX.F3.0F 0xde - invalid */
5022/* Opcode VEX.F2.0F 0xde - invalid */
5023
5024/* Opcode VEX.0F 0xdf - invalid */
5025
5026
/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Dispatches to the common AVX/AVX2 worker using the prebuilt vpandn implementation tables. */
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
}
5034
5035
5036/* Opcode VEX.F3.0F 0xdf - invalid */
5037/* Opcode VEX.F2.0F 0xdf - invalid */
5038
5039/* Opcode VEX.0F 0xe0 - invalid */
5040
5041
/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5049
5050
5051/* Opcode VEX.F3.0F 0xe0 - invalid */
5052/* Opcode VEX.F2.0F 0xe0 - invalid */
5053
/* Opcode VEX.0F 0xe1 - invalid */
/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx (unimplemented stub) */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xe1 - invalid */
/* Opcode VEX.F2.0F 0xe1 - invalid */

/* Opcode VEX.0F 0xe2 - invalid */
/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx (unimplemented stub) */
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
5063/* Opcode VEX.F3.0F 0xe2 - invalid */
5064/* Opcode VEX.F2.0F 0xe2 - invalid */
5065
5066/* Opcode VEX.0F 0xe3 - invalid */
5067
5068
/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5076
5077
5078/* Opcode VEX.F3.0F 0xe3 - invalid */
5079/* Opcode VEX.F2.0F 0xe3 - invalid */
5080
5081/* Opcode VEX.0F 0xe4 - invalid */
5082
5083
/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5091
5092
5093/* Opcode VEX.F3.0F 0xe4 - invalid */
5094/* Opcode VEX.F2.0F 0xe4 - invalid */
5095
5096/* Opcode VEX.0F 0xe5 - invalid */
5097
5098
/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5106
5107
5108/* Opcode VEX.F3.0F 0xe5 - invalid */
5109/* Opcode VEX.F2.0F 0xe5 - invalid */
5110
/* Opcode VEX.0F 0xe6 - invalid */
/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd (unimplemented stub) */
FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd (unimplemented stub) */
FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd (unimplemented stub) */
FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5118
5119
5120/* Opcode VEX.0F 0xe7 - invalid */
5121
5122/**
5123 * @opcode 0xe7
5124 * @opcodesub !11 mr/reg
5125 * @oppfx 0x66
5126 * @opcpuid avx
5127 * @opgroup og_avx_cachect
5128 * @opxcpttype 1
5129 * @optest op1=-1 op2=2 -> op1=2
5130 * @optest op1=0 op2=-42 -> op1=-42
5131 */
FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
{
    IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Non-temporal store of an XMM/YMM register to aligned memory.  The
       non-temporal caching hint is not modelled: IEM performs a plain
       alignment-checked store. */
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength == 0)
        {
            /*
             * 128-bit: Memory, register.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * 256-bit: Memory, register.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    /**
     * @opdone
     * @opmnemonic udvex660fe7reg
     * @opcode 0xe7
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
5193
5194/* Opcode VEX.F3.0F 0xe7 - invalid */
5195/* Opcode VEX.F2.0F 0xe7 - invalid */
5196
5197
5198/* Opcode VEX.0F 0xe8 - invalid */
5199
5200
/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5208
5209
5210/* Opcode VEX.F3.0F 0xe8 - invalid */
5211/* Opcode VEX.F2.0F 0xe8 - invalid */
5212
5213/* Opcode VEX.0F 0xe9 - invalid */
5214
5215
/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5223
5224
5225/* Opcode VEX.F3.0F 0xe9 - invalid */
5226/* Opcode VEX.F2.0F 0xe9 - invalid */
5227
5228/* Opcode VEX.0F 0xea - invalid */
5229
5230
/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS(vpminsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5238
5239
5240/* Opcode VEX.F3.0F 0xea - invalid */
5241/* Opcode VEX.F2.0F 0xea - invalid */
5242
5243/* Opcode VEX.0F 0xeb - invalid */
5244
5245
/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Dispatches to the common AVX/AVX2 worker using the prebuilt vpor implementation tables. */
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
}
5253
5254
5255
5256/* Opcode VEX.F3.0F 0xeb - invalid */
5257/* Opcode VEX.F2.0F 0xeb - invalid */
5258
5259/* Opcode VEX.0F 0xec - invalid */
5260
5261
/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5269
5270
5271/* Opcode VEX.F3.0F 0xec - invalid */
5272/* Opcode VEX.F2.0F 0xec - invalid */
5273
5274/* Opcode VEX.0F 0xed - invalid */
5275
5276
/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5284
5285
5286/* Opcode VEX.F3.0F 0xed - invalid */
5287/* Opcode VEX.F2.0F 0xed - invalid */
5288
5289/* Opcode VEX.0F 0xee - invalid */
5290
5291
/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5299
5300
5301/* Opcode VEX.F3.0F 0xee - invalid */
5302/* Opcode VEX.F2.0F 0xee - invalid */
5303
5304
5305/* Opcode VEX.0F 0xef - invalid */
5306
5307
/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Dispatches to the common AVX/AVX2 worker using the prebuilt vpxor implementation tables. */
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
}
5315
5316
5317/* Opcode VEX.F3.0F 0xef - invalid */
5318/* Opcode VEX.F2.0F 0xef - invalid */
5319
5320/* Opcode VEX.0F 0xf0 - invalid */
5321/* Opcode VEX.66.0F 0xf0 - invalid */
5322
5323
/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    /* Unaligned 128/256-bit load into an XMM/YMM register; note the plain
       (non-ALIGN) fetch macros, as vlddqu permits unaligned operands. */
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * Register, memory128.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory256.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, u256Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5378
5379
5380/* Opcode VEX.0F 0xf1 - invalid */
/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5388
5389/* Opcode VEX.F2.0F 0xf1 - invalid */
5390
5391/* Opcode VEX.0F 0xf2 - invalid */
/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5399/* Opcode VEX.F2.0F 0xf2 - invalid */
5400
5401/* Opcode VEX.0F 0xf3 - invalid */
/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5409/* Opcode VEX.F2.0F 0xf3 - invalid */
5410
5411/* Opcode VEX.0F 0xf4 - invalid */
5412
5413
/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5421
5422
5423/* Opcode VEX.F2.0F 0xf4 - invalid */
5424
5425/* Opcode VEX.0F 0xf5 - invalid */
/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx (unimplemented stub) */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
5428/* Opcode VEX.F2.0F 0xf5 - invalid */
5429
5430/* Opcode VEX.0F 0xf6 - invalid */
5431
5432
/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5440
5441
5442/* Opcode VEX.F2.0F 0xf6 - invalid */
5443
5444/* Opcode VEX.0F 0xf7 - invalid */
/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq (unimplemented stub) */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
5447/* Opcode VEX.F2.0F 0xf7 - invalid */
5448
5449/* Opcode VEX.0F 0xf8 - invalid */
5450
5451
/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS( vpsubb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5459
5460
5461/* Opcode VEX.F2.0F 0xf8 - invalid */
5462
5463/* Opcode VEX.0F 0xf9 - invalid */
5464
5465
/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS( vpsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5473
5474
5475/* Opcode VEX.F2.0F 0xf9 - invalid */
5476
5477/* Opcode VEX.0F 0xfa - invalid */
5478
5479
/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS( vpsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5487
5488
5489/* Opcode VEX.F2.0F 0xfa - invalid */
5490
5491/* Opcode VEX.0F 0xfb - invalid */
5492
5493
/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS( vpsubq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5501
5502
5503/* Opcode VEX.F2.0F 0xfb - invalid */
5504
5505/* Opcode VEX.0F 0xfc - invalid */
5506
5507
/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS( vpaddb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5515
5516
5517/* Opcode VEX.F2.0F 0xfc - invalid */
5518
5519/* Opcode VEX.0F 0xfd - invalid */
5520
5521
/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS( vpaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5529
5530
5531/* Opcode VEX.F2.0F 0xfd - invalid */
5532
5533/* Opcode VEX.0F 0xfe - invalid */
5534
5535
/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    /* Sets up the s_Host/s_Fallback implementation tables and defers to the common AVX/AVX2 worker. */
    IEMOPMEDIAF3_INIT_VARS( vpaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
5543
5544
5545/* Opcode VEX.F2.0F 0xfe - invalid */
5546
5547
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_vud0)
{
/** @todo testcase: vud0 */
    IEMOP_MNEMONIC(vud0, "vud0");
    /* On Intel CPUs the decoder consumes a ModR/M byte (and any effective
       address bytes) for UD0, hence the vendor check; the instruction
       unconditionally raises \#UD either way. */
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
5562
5563
5564
5565/**
5566 * VEX opcode map \#1.
5567 *
5568 * @sa g_apfnTwoByteMap
5569 */
5570const PFNIEMOP g_apfnVexMap1[] =
5571{
5572 /* no prefix, 066h prefix f3h prefix, f2h prefix */
5573 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
5574 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
5575 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
5576 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
5577 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
5578 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
5579 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
5580 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
5581 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
5582 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
5583 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
5584 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
5585 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
5586 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
5587 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
5588 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
5589
5590 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
5591 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
5592 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
5593 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5594 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5595 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5596 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
5597 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5598 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
5599 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
5600 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
5601 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
5602 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
5603 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
5604 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
5605 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
5606
5607 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
5608 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
5609 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
5610 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
5611 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
5612 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
5613 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
5614 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
5615 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5616 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5617 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
5618 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5619 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
5620 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
5621 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5622 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5623
5624 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
5625 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
5626 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
5627 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
5628 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
5629 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
5630 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
5631 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
5632 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5633 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5634 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5635 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5636 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5637 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5638 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5639 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5640
5641 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
5642 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
5643 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
5644 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
5645 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
5646 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
5647 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
5648 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
5649 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
5650 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
5651 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
5652 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
5653 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
5654 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
5655 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
5656 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
5657
5658 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5659 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
5660 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5661 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5662 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5663 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5664 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5665 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5666 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
5667 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
5668 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
5669 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
5670 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
5671 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
5672 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
5673 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
5674
5675 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5676 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5677 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5678 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5679 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5680 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5681 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5682 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5683 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5684 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5685 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5686 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5687 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5688 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5689 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5690 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
5691
5692 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
5693 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5694 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5695 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5696 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5697 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5698 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5699 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5700 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
5701 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
5702 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
5703 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
5704 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
5705 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
5706 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
5707 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
5708
5709 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
5710 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
5711 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
5712 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
5713 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
5714 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
5715 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
5716 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
5717 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
5718 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
5719 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
5720 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
5721 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
5722 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
5723 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
5724 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
5725
5726 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
5727 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
5728 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
5729 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
5730 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
5731 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
5732 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
5733 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
5734 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
5735 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
5736 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
5737 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
5738 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
5739 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
5740 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
5741 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
5742
5743 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5744 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5745 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5746 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5747 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5748 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5749 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5750 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5751 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5752 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5753 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
5754 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
5755 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
5756 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
5757 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
5758 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
5759
5760 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5761 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5762 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5763 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5764 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5765 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5766 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5767 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5768 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5769 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5770 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
5771 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
5772 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
5773 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
5774 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
5775 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
5776
5777 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5778 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5779 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
5780 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5781 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5782 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5783 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
5784 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5785 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5786 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5787 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
5788 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
5789 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
5790 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
5791 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
5792 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
5793
5794 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
5795 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5796 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5797 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5798 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5799 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5800 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5801 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5802 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5803 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5804 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5805 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5806 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5807 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5808 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5809 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5810
5811 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5812 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5813 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5814 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5815 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5816 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5817 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
5818 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5819 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5820 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5821 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5822 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5823 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5824 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5825 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5826 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5827
5828 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
5829 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5830 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5831 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5832 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5833 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5834 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5835 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5836 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5837 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5838 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5839 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5840 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5841 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5842 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5843 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
5844};
5845AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
5846/** @} */
5847
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette