VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h @ 104135

Last change on this file since 104135 was 104135, checked in by vboxsync, 13 months ago

VMM/IEM: Rework MXCSR handling for AVX instructions. See r162550 for an explanation, bugref:10641

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 221.5 KB
Line 
1/* $Id: IEMAllInstVexMap1.cpp.h 104135 2024-04-03 13:18:38Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36#if 0 /*Unused*/
37/**
38 * Common worker for AVX2 instructions on the forms:
39 * - vpxxx xmm0, xmm1, xmm2/mem128
40 * - vpxxx ymm0, ymm1, ymm2/mem256
41 *
42 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
43 */
44FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
45{
46 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
47 if (IEM_IS_MODRM_REG_MODE(bRm))
48 {
49 /*
50 * Register, register.
51 */
52 if (pVCpu->iem.s.uVexLength)
53 {
54 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
55 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
56 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
57 IEM_MC_PREPARE_AVX_USAGE();
58
59 IEM_MC_LOCAL(RTUINT256U, uSrc1);
60 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
61 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
62
63 IEM_MC_LOCAL(RTUINT256U, uSrc2);
64 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
65 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
66
67 IEM_MC_LOCAL(RTUINT256U, uDst);
68 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
69
70 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
71
72 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
73 IEM_MC_ADVANCE_RIP_AND_FINISH();
74 IEM_MC_END();
75 }
76 else
77 {
78 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
79 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
80 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
81 IEM_MC_PREPARE_AVX_USAGE();
82
83 IEM_MC_ARG(PRTUINT128U, puDst, 0);
84 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
85 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
86 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
87 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
88 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
89 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
90 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
91 IEM_MC_ADVANCE_RIP_AND_FINISH();
92 IEM_MC_END();
93 }
94 }
95 else
96 {
97 /*
98 * Register, memory.
99 */
100 if (pVCpu->iem.s.uVexLength)
101 {
102 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
104 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
105 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
106 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
107 IEM_MC_PREPARE_AVX_USAGE();
108
109 IEM_MC_LOCAL(RTUINT256U, uSrc2);
110 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
111 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
112
113 IEM_MC_LOCAL(RTUINT256U, uSrc1);
114 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
115 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
116
117 IEM_MC_LOCAL(RTUINT256U, uDst);
118 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
119
120 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
121
122 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
123 IEM_MC_ADVANCE_RIP_AND_FINISH();
124 IEM_MC_END();
125 }
126 else
127 {
128 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
131 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
132 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
133 IEM_MC_PREPARE_AVX_USAGE();
134
135 IEM_MC_LOCAL(RTUINT128U, uSrc2);
136 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
137 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
138
139 IEM_MC_ARG(PRTUINT128U, puDst, 0);
140 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
141 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
142 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
143
144 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
145 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
146
147 IEM_MC_ADVANCE_RIP_AND_FINISH();
148 IEM_MC_END();
149 }
150 }
151}
152#endif
153
154/**
155 * Common worker for AVX2 instructions on the forms:
156 * - vpxxx xmm0, xmm1, xmm2/mem128
157 * - vpxxx ymm0, ymm1, ymm2/mem256
158 *
159 * Takes function table for function w/o implicit state parameter.
160 *
161 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
162 */
163FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
164{
165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
166 if (IEM_IS_MODRM_REG_MODE(bRm))
167 {
168 /*
169 * Register, register.
170 */
171 if (pVCpu->iem.s.uVexLength)
172 {
173 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
174 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
175 IEM_MC_LOCAL(RTUINT256U, uDst);
176 IEM_MC_LOCAL(RTUINT256U, uSrc1);
177 IEM_MC_LOCAL(RTUINT256U, uSrc2);
178 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
179 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
180 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
181 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
182 IEM_MC_PREPARE_AVX_USAGE();
183 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
184 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
186 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
187 IEM_MC_ADVANCE_RIP_AND_FINISH();
188 IEM_MC_END();
189 }
190 else
191 {
192 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
193 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
194 IEM_MC_ARG(PRTUINT128U, puDst, 0);
195 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
196 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
197 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
198 IEM_MC_PREPARE_AVX_USAGE();
199 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
200 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
201 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
202 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
203 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
204 IEM_MC_ADVANCE_RIP_AND_FINISH();
205 IEM_MC_END();
206 }
207 }
208 else
209 {
210 /*
211 * Register, memory.
212 */
213 if (pVCpu->iem.s.uVexLength)
214 {
215 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
216 IEM_MC_LOCAL(RTUINT256U, uDst);
217 IEM_MC_LOCAL(RTUINT256U, uSrc1);
218 IEM_MC_LOCAL(RTUINT256U, uSrc2);
219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
220 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
221 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
222 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
223
224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
225 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
226 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
227 IEM_MC_PREPARE_AVX_USAGE();
228
229 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
230 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
231 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
232 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
233
234 IEM_MC_ADVANCE_RIP_AND_FINISH();
235 IEM_MC_END();
236 }
237 else
238 {
239 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
240 IEM_MC_LOCAL(RTUINT128U, uSrc2);
241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
242 IEM_MC_ARG(PRTUINT128U, puDst, 0);
243 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
244 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
245
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
247 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
248 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
249 IEM_MC_PREPARE_AVX_USAGE();
250
251 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
252 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
253 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
254 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
255 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
256
257 IEM_MC_ADVANCE_RIP_AND_FINISH();
258 IEM_MC_END();
259 }
260 }
261}
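
/*
 * Illustrative sketch (not built): how an opcode handler typically
 * dispatches through iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt.  The pattern mirrors
 * real users such as iemOp_vunpcklps_Vx_Hx_Wx further down; the 'vpxxx'
 * names are placeholders, not a real instruction.
 */
#if 0 /* Example only. */
FNIEMOP_DEF(iemOp_vpxxx_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPXXX, vpxxx, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpxxx);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
#endif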
262
263
264/**
265 * Common worker for AVX2 instructions on the forms:
266 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
267 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
268 *
269 * The 128-bit memory version of this instruction may elect to skip fetching the
270 * lower 64 bits of the operand. We, however, do not.
271 *
272 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
273 */
274FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
275{
276 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
277}
278
279
280/**
281 * Common worker for AVX2 instructions on the forms:
282 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
283 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
284 *
285 * The 128-bit memory version of this instruction may elect to skip fetching the
286 * higher 64 bits of the operand. We, however, do not.
287 *
288 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
289 */
290FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
291{
292 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
293}
294
295
296/**
297 * Common worker for AVX2 instructions on the forms:
298 * - vpxxx xmm0, xmm1/mem128
299 * - vpxxx ymm0, ymm1/mem256
300 *
301 * Takes function table for function w/o implicit state parameter.
302 *
303 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
304 */
305FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
306{
307 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
308 if (IEM_IS_MODRM_REG_MODE(bRm))
309 {
310 /*
311 * Register, register.
312 */
313 if (pVCpu->iem.s.uVexLength)
314 {
315 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
316 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
317 IEM_MC_LOCAL(RTUINT256U, uDst);
318 IEM_MC_LOCAL(RTUINT256U, uSrc);
319 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
320 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
321 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
322 IEM_MC_PREPARE_AVX_USAGE();
323 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
324 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
325 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
326 IEM_MC_ADVANCE_RIP_AND_FINISH();
327 IEM_MC_END();
328 }
329 else
330 {
331 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
332 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
333 IEM_MC_ARG(PRTUINT128U, puDst, 0);
334 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
335 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
336 IEM_MC_PREPARE_AVX_USAGE();
337 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
338 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
339 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
340 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
341 IEM_MC_ADVANCE_RIP_AND_FINISH();
342 IEM_MC_END();
343 }
344 }
345 else
346 {
347 /*
348 * Register, memory.
349 */
350 if (pVCpu->iem.s.uVexLength)
351 {
352 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
353 IEM_MC_LOCAL(RTUINT256U, uDst);
354 IEM_MC_LOCAL(RTUINT256U, uSrc);
355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
356 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
357 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
358
359 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
360 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
361 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
362 IEM_MC_PREPARE_AVX_USAGE();
363
364 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
365 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
366 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
367
368 IEM_MC_ADVANCE_RIP_AND_FINISH();
369 IEM_MC_END();
370 }
371 else
372 {
373 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
374 IEM_MC_LOCAL(RTUINT128U, uSrc);
375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
376 IEM_MC_ARG(PRTUINT128U, puDst, 0);
377 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
378
379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
380 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
381 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
382 IEM_MC_PREPARE_AVX_USAGE();
383
384 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
385 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
386 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
387 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
388
389 IEM_MC_ADVANCE_RIP_AND_FINISH();
390 IEM_MC_END();
391 }
392 }
393}
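
/*
 * Illustrative sketch (not built): dispatching a two-operand instruction
 * through iemOpCommonAvxAvx2_Vx_Wx_Opt.  This assumes an
 * IEMOPMEDIAOPTF2_INIT_VARS helper analogous to the three-operand
 * IEMOPMEDIAOPTF3_INIT_VARS used elsewhere in this file; 'vpyyy' is a
 * placeholder name.
 */
#if 0 /* Example only. */
FNIEMOP_DEF(iemOp_vpyyy_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPYYY, vpyyy, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpyyy);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
#endif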
394
395
396/* Opcode VEX.0F 0x00 - invalid */
397/* Opcode VEX.0F 0x01 - invalid */
398/* Opcode VEX.0F 0x02 - invalid */
399/* Opcode VEX.0F 0x03 - invalid */
400/* Opcode VEX.0F 0x04 - invalid */
401/* Opcode VEX.0F 0x05 - invalid */
402/* Opcode VEX.0F 0x06 - invalid */
403/* Opcode VEX.0F 0x07 - invalid */
404/* Opcode VEX.0F 0x08 - invalid */
405/* Opcode VEX.0F 0x09 - invalid */
406/* Opcode VEX.0F 0x0a - invalid */
407
408/** Opcode VEX.0F 0x0b. */
409FNIEMOP_DEF(iemOp_vud2)
410{
411 IEMOP_MNEMONIC(vud2, "vud2");
412 IEMOP_RAISE_INVALID_OPCODE_RET();
413}
414
415/* Opcode VEX.0F 0x0c - invalid */
416/* Opcode VEX.0F 0x0d - invalid */
417/* Opcode VEX.0F 0x0e - invalid */
418/* Opcode VEX.0F 0x0f - invalid */
419
420
421/**
422 * @opcode 0x10
423 * @oppfx none
424 * @opcpuid avx
425 * @opgroup og_avx_simdfp_datamove
426 * @opxcpttype 4UA
427 * @optest op1=1 op2=2 -> op1=2
428 * @optest op1=0 op2=-22 -> op1=-22
429 */
430FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
431{
432 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
433 Assert(pVCpu->iem.s.uVexLength <= 1);
434 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
435 if (IEM_IS_MODRM_REG_MODE(bRm))
436 {
437 /*
438 * Register, register.
439 */
440 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
441 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
442 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
443 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
444 if (pVCpu->iem.s.uVexLength == 0)
445 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
446 IEM_GET_MODRM_RM(pVCpu, bRm));
447 else
448 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
449 IEM_GET_MODRM_RM(pVCpu, bRm));
450 IEM_MC_ADVANCE_RIP_AND_FINISH();
451 IEM_MC_END();
452 }
453 else if (pVCpu->iem.s.uVexLength == 0)
454 {
455 /*
456 * 128-bit: Register, Memory
457 */
458 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
459 IEM_MC_LOCAL(RTUINT128U, uSrc);
460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
461
462 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
463 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
464 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
465 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
466
467 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
468 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
469
470 IEM_MC_ADVANCE_RIP_AND_FINISH();
471 IEM_MC_END();
472 }
473 else
474 {
475 /*
476 * 256-bit: Register, Memory
477 */
478 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
479 IEM_MC_LOCAL(RTUINT256U, uSrc);
480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
481
482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
483 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
484 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
485 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
486
487 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
488 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
489
490 IEM_MC_ADVANCE_RIP_AND_FINISH();
491 IEM_MC_END();
492 }
493}
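
/*
 * Semantics sketch in plain C (illustrative only): the VEX move forms zero
 * the destination bits above the operand size, which is what the *_ZX_VLMAX
 * macros above implement for the 128-bit load:
 */
#if 0 /* Example only. */
static void vmovupsV128LoadExample(PRTUINT256U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] = puSrc->au64[0];
    puDst->au64[1] = puSrc->au64[1];
    puDst->au64[2] = 0;                 /* bits 255:128 are zeroed (ZX_VLMAX) */
    puDst->au64[3] = 0;
}
#endif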
494
495
496/**
497 * @opcode 0x10
498 * @oppfx 0x66
499 * @opcpuid avx
500 * @opgroup og_avx_simdfp_datamove
501 * @opxcpttype 4UA
502 * @optest op1=1 op2=2 -> op1=2
503 * @optest op1=0 op2=-22 -> op1=-22
504 */
505FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
506{
507 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
508 Assert(pVCpu->iem.s.uVexLength <= 1);
509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
510 if (IEM_IS_MODRM_REG_MODE(bRm))
511 {
512 /*
513 * Register, register.
514 */
515 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
516 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
517 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
518 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
519 if (pVCpu->iem.s.uVexLength == 0)
520 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
521 IEM_GET_MODRM_RM(pVCpu, bRm));
522 else
523 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
524 IEM_GET_MODRM_RM(pVCpu, bRm));
525 IEM_MC_ADVANCE_RIP_AND_FINISH();
526 IEM_MC_END();
527 }
528 else if (pVCpu->iem.s.uVexLength == 0)
529 {
530 /*
531 * 128-bit: Register, memory.
532 */
533 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
534 IEM_MC_LOCAL(RTUINT128U, uSrc);
535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
536
537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
538 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
539 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
540 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
541
542 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
543 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
544
545 IEM_MC_ADVANCE_RIP_AND_FINISH();
546 IEM_MC_END();
547 }
548 else
549 {
550 /*
551 * 256-bit: Register, memory.
552 */
553 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
554 IEM_MC_LOCAL(RTUINT256U, uSrc);
555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
556
557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
558 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
559 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
560 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
561
562 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
563 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
564
565 IEM_MC_ADVANCE_RIP_AND_FINISH();
566 IEM_MC_END();
567 }
568}
569
570
571FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
572{
573 Assert(pVCpu->iem.s.uVexLength <= 1);
574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
575 if (IEM_IS_MODRM_REG_MODE(bRm))
576 {
577 /**
578 * @opcode 0x10
579 * @oppfx 0xf3
580 * @opcodesub 11 mr/reg
581 * @opcpuid avx
582 * @opgroup og_avx_simdfp_datamerge
583 * @opxcpttype 5
584 * @optest op1=1 op2=0 op3=2 -> op1=2
585 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
586 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
587 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
588 * @note HssHi refers to bits 127:32.
589 */
590 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
591 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
592 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
593 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
594 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
595 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
596 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
597 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
598 IEM_MC_ADVANCE_RIP_AND_FINISH();
599 IEM_MC_END();
600 }
601 else
602 {
603 /**
604 * @opdone
605 * @opcode 0x10
606 * @oppfx 0xf3
607 * @opcodesub !11 mr/reg
608 * @opcpuid avx
609 * @opgroup og_avx_simdfp_datamove
610 * @opxcpttype 5
611 * @opfunction iemOp_vmovss_Vss_Hss_Wss
612 * @optest op1=1 op2=2 -> op1=2
613 * @optest op1=0 op2=-22 -> op1=-22
614 */
615 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
617 IEM_MC_LOCAL(uint32_t, uSrc);
618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
619
620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
621 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
624
625 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
626 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
627
628 IEM_MC_ADVANCE_RIP_AND_FINISH();
629 IEM_MC_END();
630 }
631}
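
/*
 * Semantics sketch in plain C (illustrative only): the register form of
 * vmovss merges the low dword of the Uss source with bits 127:32 of the
 * VVVV register, as IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX does above (which
 * additionally zeroes YMM bits 255:128).
 */
#if 0 /* Example only. */
static void vmovssRegFormExample(PRTUINT128U puDst, PCRTUINT128U puVvvv, PCRTUINT128U puSrc)
{
    puDst->au32[0] = puSrc->au32[0];    /* low scalar from Uss */
    puDst->au32[1] = puVvvv->au32[1];   /* bits 127:32 from HssHi */
    puDst->au32[2] = puVvvv->au32[2];
    puDst->au32[3] = puVvvv->au32[3];
}
#endif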
632
633
634FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
635{
636 Assert(pVCpu->iem.s.uVexLength <= 1);
637 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
638 if (IEM_IS_MODRM_REG_MODE(bRm))
639 {
640 /**
641 * @opcode 0x10
642 * @oppfx 0xf2
643 * @opcodesub 11 mr/reg
644 * @opcpuid avx
645 * @opgroup og_avx_simdfp_datamerge
646 * @opxcpttype 5
647 * @optest op1=1 op2=0 op3=2 -> op1=2
648 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
649 * @optest op1=3 op2=-1 op3=0x77 ->
650 * op1=0xffffffffffffffff0000000000000077
651 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
652 */
653 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
654 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
655 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
656
657 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
658 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
659 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
660 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
661 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
662 IEM_MC_ADVANCE_RIP_AND_FINISH();
663 IEM_MC_END();
664 }
665 else
666 {
667 /**
668 * @opdone
669 * @opcode 0x10
670 * @oppfx 0xf2
671 * @opcodesub !11 mr/reg
672 * @opcpuid avx
673 * @opgroup og_avx_simdfp_datamove
674 * @opxcpttype 5
675 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
676 * @optest op1=1 op2=2 -> op1=2
677 * @optest op1=0 op2=-22 -> op1=-22
678 */
679 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
680 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
681 IEM_MC_LOCAL(uint64_t, uSrc);
682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
683
684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
685 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
686 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
687 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
688
689 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
690 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
691
692 IEM_MC_ADVANCE_RIP_AND_FINISH();
693 IEM_MC_END();
694 }
695}
696
697
698/**
699 * @opcode 0x11
700 * @oppfx none
701 * @opcpuid avx
702 * @opgroup og_avx_simdfp_datamove
703 * @opxcpttype 4UA
704 * @optest op1=1 op2=2 -> op1=2
705 * @optest op1=0 op2=-22 -> op1=-22
706 */
707FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
708{
709 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
710 Assert(pVCpu->iem.s.uVexLength <= 1);
711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
712 if (IEM_IS_MODRM_REG_MODE(bRm))
713 {
714 /*
715 * Register, register.
716 */
717 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
718 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
719 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
720 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
721 if (pVCpu->iem.s.uVexLength == 0)
722 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
723 IEM_GET_MODRM_REG(pVCpu, bRm));
724 else
725 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
726 IEM_GET_MODRM_REG(pVCpu, bRm));
727 IEM_MC_ADVANCE_RIP_AND_FINISH();
728 IEM_MC_END();
729 }
730 else if (pVCpu->iem.s.uVexLength == 0)
731 {
732 /*
733 * 128-bit: Memory, register.
734 */
735 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
736 IEM_MC_LOCAL(RTUINT128U, uSrc);
737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
738
739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
740 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
741 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
742 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
743
744 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
745 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
746
747 IEM_MC_ADVANCE_RIP_AND_FINISH();
748 IEM_MC_END();
749 }
750 else
751 {
752 /*
753 * 256-bit: Memory, register.
754 */
755 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
756 IEM_MC_LOCAL(RTUINT256U, uSrc);
757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
758
759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
760 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
761 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
762 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
763
764 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
765 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
766
767 IEM_MC_ADVANCE_RIP_AND_FINISH();
768 IEM_MC_END();
769 }
770}
771
772
773/**
774 * @opcode 0x11
775 * @oppfx 0x66
776 * @opcpuid avx
777 * @opgroup og_avx_simdfp_datamove
778 * @opxcpttype 4UA
779 * @optest op1=1 op2=2 -> op1=2
780 * @optest op1=0 op2=-22 -> op1=-22
781 */
782FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
783{
784 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
785 Assert(pVCpu->iem.s.uVexLength <= 1);
786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
787 if (IEM_IS_MODRM_REG_MODE(bRm))
788 {
789 /*
790 * Register, register.
791 */
792 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
793 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
794 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
795 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
796 if (pVCpu->iem.s.uVexLength == 0)
797 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
798 IEM_GET_MODRM_REG(pVCpu, bRm));
799 else
800 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
801 IEM_GET_MODRM_REG(pVCpu, bRm));
802 IEM_MC_ADVANCE_RIP_AND_FINISH();
803 IEM_MC_END();
804 }
805 else if (pVCpu->iem.s.uVexLength == 0)
806 {
807 /*
808 * 128-bit: Memory, register.
809 */
810 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
811 IEM_MC_LOCAL(RTUINT128U, uSrc);
812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
813
814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
815 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
816 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
817 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
818
819 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
820 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
821
822 IEM_MC_ADVANCE_RIP_AND_FINISH();
823 IEM_MC_END();
824 }
825 else
826 {
827 /*
828 * 256-bit: Memory, register.
829 */
830 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
831 IEM_MC_LOCAL(RTUINT256U, uSrc);
832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
833
834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
835 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
836 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
837 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
838
839 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
840 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
841
842 IEM_MC_ADVANCE_RIP_AND_FINISH();
843 IEM_MC_END();
844 }
845}
846
847
848FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
849{
850 Assert(pVCpu->iem.s.uVexLength <= 1);
851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
852 if (IEM_IS_MODRM_REG_MODE(bRm))
853 {
854 /**
855 * @opcode 0x11
856 * @oppfx 0xf3
857 * @opcodesub 11 mr/reg
858 * @opcpuid avx
859 * @opgroup og_avx_simdfp_datamerge
860 * @opxcpttype 5
861 * @optest op1=1 op2=0 op3=2 -> op1=2
862 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
863 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
864 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
865 */
866 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
867 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
868 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
869
870 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
871 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
872 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
873 IEM_GET_MODRM_REG(pVCpu, bRm),
874 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
875 IEM_MC_ADVANCE_RIP_AND_FINISH();
876 IEM_MC_END();
877 }
878 else
879 {
880 /**
881 * @opdone
882 * @opcode 0x11
883 * @oppfx 0xf3
884 * @opcodesub !11 mr/reg
885 * @opcpuid avx
886 * @opgroup og_avx_simdfp_datamove
887 * @opxcpttype 5
888 * @opfunction iemOp_vmovss_Wss_Hss_Vss
889 * @optest op1=1 op2=2 -> op1=2
890 * @optest op1=0 op2=-22 -> op1=-22
891 */
892 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
893 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
894 IEM_MC_LOCAL(uint32_t, uSrc);
895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
896
897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
898 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
899 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
900 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
901
902 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
903 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
904
905 IEM_MC_ADVANCE_RIP_AND_FINISH();
906 IEM_MC_END();
907 }
908}
909
910
911FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
912{
913 Assert(pVCpu->iem.s.uVexLength <= 1);
914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
915 if (IEM_IS_MODRM_REG_MODE(bRm))
916 {
917 /**
918 * @opcode 0x11
919 * @oppfx 0xf2
920 * @opcodesub 11 mr/reg
921 * @opcpuid avx
922 * @opgroup og_avx_simdfp_datamerge
923 * @opxcpttype 5
924 * @optest op1=1 op2=0 op3=2 -> op1=2
925 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
926 * @optest op1=3 op2=-1 op3=0x77 ->
927 * op1=0xffffffffffffffff0000000000000077
928 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
929 */
930 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
931 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
932 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
933
934 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
935 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
936 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
937 IEM_GET_MODRM_REG(pVCpu, bRm),
938 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
939 IEM_MC_ADVANCE_RIP_AND_FINISH();
940 IEM_MC_END();
941 }
942 else
943 {
944 /**
945 * @opdone
946 * @opcode 0x11
947 * @oppfx 0xf2
948 * @opcodesub !11 mr/reg
949 * @opcpuid avx
950 * @opgroup og_avx_simdfp_datamove
951 * @opxcpttype 5
952 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
953 * @optest op1=1 op2=2 -> op1=2
954 * @optest op1=0 op2=-22 -> op1=-22
955 */
956 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
957 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
958 IEM_MC_LOCAL(uint64_t, uSrc);
959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
960
961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
962 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
963 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
964 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
965
966 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
967 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
968
969 IEM_MC_ADVANCE_RIP_AND_FINISH();
970 IEM_MC_END();
971 }
972}
973
974
975FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
976{
977 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
978 if (IEM_IS_MODRM_REG_MODE(bRm))
979 {
980 /**
981 * @opcode 0x12
982 * @opcodesub 11 mr/reg
983 * @oppfx none
984 * @opcpuid avx
985 * @opgroup og_avx_simdfp_datamerge
986 * @opxcpttype 7LZ
987 * @optest op2=0x2200220122022203
988 * op3=0x3304330533063307
989 * -> op1=0x22002201220222033304330533063307
990 * @optest op2=-1 op3=-42 -> op1=-42
991 * @note op3 and op2 are only the 8-byte high XMM register halves.
992 */
993 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
994 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
995 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
996
997 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
998 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
999 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1000 IEM_GET_MODRM_RM(pVCpu, bRm),
1001 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1002
1003 IEM_MC_ADVANCE_RIP_AND_FINISH();
1004 IEM_MC_END();
1005 }
1006 else
1007 {
1008 /**
1009 * @opdone
1010 * @opcode 0x12
1011 * @opcodesub !11 mr/reg
1012 * @oppfx none
1013 * @opcpuid avx
1014 * @opgroup og_avx_simdfp_datamove
1015 * @opxcpttype 5LZ
1016 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1017 * @optest op1=1 op2=0 op3=0 -> op1=0
1018 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1019 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1020 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1021 */
1022 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1023
1024 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1025 IEM_MC_LOCAL(uint64_t, uSrc);
1026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1027
1028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1029 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1031 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1032
1033 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1034 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1035 uSrc,
1036 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1037
1038 IEM_MC_ADVANCE_RIP_AND_FINISH();
1039 IEM_MC_END();
1040 }
1041}
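
/*
 * Semantics sketch in plain C (illustrative only): the register form
 * (vmovhlps) moves the high qword of the U register into the low qword of
 * the destination and keeps the high qword of VVVV, matching
 * IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX above.
 */
#if 0 /* Example only. */
static void vmovhlpsExample(PRTUINT128U puDst, PCRTUINT128U puVvvv, PCRTUINT128U puSrc)
{
    puDst->au64[0] = puSrc->au64[1];    /* high half of UqHi -> low half */
    puDst->au64[1] = puVvvv->au64[1];   /* high half of HqHi is kept */
}
#endif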
1042
1043
1044/**
1045 * @opcode 0x12
1046 * @opcodesub !11 mr/reg
1047 * @oppfx 0x66
1048 * @opcpuid avx
1049 * @opgroup og_avx_pcksclr_datamerge
1050 * @opxcpttype 5LZ
1051 * @optest op2=0 op3=2 -> op1=2
1052 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1053 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1054 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1055 */
1056FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1057{
1058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1059 if (IEM_IS_MODRM_MEM_MODE(bRm))
1060 {
1061 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1062
1063 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1064 IEM_MC_LOCAL(uint64_t, uSrc);
1065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1066
1067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1068 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1069 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1070 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1071
1072 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1073 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1074 uSrc,
1075 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1076
1077 IEM_MC_ADVANCE_RIP_AND_FINISH();
1078 IEM_MC_END();
1079 }
1080
1081 /**
1082 * @opdone
1083 * @opmnemonic udvex660f12m3
1084 * @opcode 0x12
1085 * @opcodesub 11 mr/reg
1086 * @oppfx 0x66
1087 * @opunused immediate
1088 * @opcpuid avx
1089 * @optest ->
1090 */
1091 else
1092 IEMOP_RAISE_INVALID_OPCODE_RET();
1093}
1094
1095
1096/**
1097 * @opcode 0x12
1098 * @oppfx 0xf3
1099 * @opcpuid avx
1100 * @opgroup og_avx_pcksclr_datamove
1101 * @opxcpttype 4
1102 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1103 * -> op1=0x00000002000000020000000100000001
1104 * @optest vex.l==1 /
1105 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1106 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1107 */
1108FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1109{
1110 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1111 Assert(pVCpu->iem.s.uVexLength <= 1);
1112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1113 if (IEM_IS_MODRM_REG_MODE(bRm))
1114 {
1115 /*
1116 * Register, register.
1117 */
1118 if (pVCpu->iem.s.uVexLength == 0)
1119 {
1120 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1121 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1122 IEM_MC_LOCAL(RTUINT128U, uSrc);
1123
1124 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1125 IEM_MC_PREPARE_AVX_USAGE();
1126
1127 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1128 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1129 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1130 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1131 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1132 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1133
1134 IEM_MC_ADVANCE_RIP_AND_FINISH();
1135 IEM_MC_END();
1136 }
1137 else
1138 {
1139 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1140 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1141 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1142 IEM_MC_PREPARE_AVX_USAGE();
1143
1144 IEM_MC_LOCAL(RTUINT256U, uSrc);
1145 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1146 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1147 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1148 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1149 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1150 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1151 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1152 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1153 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1154 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1155
1156 IEM_MC_ADVANCE_RIP_AND_FINISH();
1157 IEM_MC_END();
1158 }
1159 }
1160 else
1161 {
1162 /*
1163 * Register, memory.
1164 */
1165 if (pVCpu->iem.s.uVexLength == 0)
1166 {
1167 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1168 IEM_MC_LOCAL(RTUINT128U, uSrc);
1169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1170
1171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1172 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1173 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1174 IEM_MC_PREPARE_AVX_USAGE();
1175
1176 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1177 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1178 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1179 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1180 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1181 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1182
1183 IEM_MC_ADVANCE_RIP_AND_FINISH();
1184 IEM_MC_END();
1185 }
1186 else
1187 {
1188 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1190 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1191 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1192 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1193 IEM_MC_PREPARE_AVX_USAGE();
1194
1195 IEM_MC_LOCAL(RTUINT256U, uSrc);
1196 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1197
1198 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1199 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1200 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1201 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1202 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1203 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1204 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1205 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1206 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1207
1208 IEM_MC_ADVANCE_RIP_AND_FINISH();
1209 IEM_MC_END();
1210 }
1211 }
1212}
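
/*
 * Semantics sketch in plain C (illustrative only): vmovsldup duplicates the
 * even dwords of the source into each dword pair, matching the
 * IEM_MC_STORE_*REG_U32_* sequences above; vmovshdup (below) does the same
 * with the odd dwords.
 */
#if 0 /* Example only. */
static void vmovsldupU128Example(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au32[0] = puSrc->au32[0];
    puDst->au32[1] = puSrc->au32[0];
    puDst->au32[2] = puSrc->au32[2];
    puDst->au32[3] = puSrc->au32[2];
}
#endif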
1213
1214
1215/**
1216 * @opcode 0x12
1217 * @oppfx 0xf2
1218 * @opcpuid avx
1219 * @opgroup og_avx_pcksclr_datamove
1220 * @opxcpttype 5
1221 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1222 * -> op1=0x22222222111111112222222211111111
1223 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1224 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1225 */
1226FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1227{
1228 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1230 if (IEM_IS_MODRM_REG_MODE(bRm))
1231 {
1232 /*
1233 * Register, register.
1234 */
1235 if (pVCpu->iem.s.uVexLength == 0)
1236 {
1237 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1238 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1239 IEM_MC_LOCAL(uint64_t, uSrc);
1240
1241 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1242 IEM_MC_PREPARE_AVX_USAGE();
1243
1244 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1245 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1246 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1247 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1248
1249 IEM_MC_ADVANCE_RIP_AND_FINISH();
1250 IEM_MC_END();
1251 }
1252 else
1253 {
1254 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1255 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1256 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1257 IEM_MC_PREPARE_AVX_USAGE();
1258
1259 IEM_MC_LOCAL(uint64_t, uSrc1);
1260 IEM_MC_LOCAL(uint64_t, uSrc2);
1261 IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1262 IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);
1263
1264 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
1265 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
1266 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
1267 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
1268 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1269
1270 IEM_MC_ADVANCE_RIP_AND_FINISH();
1271 IEM_MC_END();
1272 }
1273 }
1274 else
1275 {
1276 /*
1277 * Register, memory.
1278 */
1279 if (pVCpu->iem.s.uVexLength == 0)
1280 {
1281 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1283 IEM_MC_LOCAL(uint64_t, uSrc);
1284
1285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1286 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1287 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1288 IEM_MC_PREPARE_AVX_USAGE();
1289
1290 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1291 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1292 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1293 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1294
1295 IEM_MC_ADVANCE_RIP_AND_FINISH();
1296 IEM_MC_END();
1297 }
1298 else
1299 {
1300 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1302
1303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1305 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1306 IEM_MC_PREPARE_AVX_USAGE();
1307
1308 IEM_MC_LOCAL(RTUINT256U, uSrc);
1309 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1310
1311 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1312 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1313 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1314 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1315 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1316
1317 IEM_MC_ADVANCE_RIP_AND_FINISH();
1318 IEM_MC_END();
1319 }
1320 }
1321}
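
/*
 * Semantics sketch in plain C (illustrative only): the 128-bit form of
 * vmovddup broadcasts the low qword; the 256-bit form does the same per
 * 128-bit lane, as the qword fetch/store pairs above show.
 */
#if 0 /* Example only. */
static void vmovddupU128Example(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] = puSrc->au64[0];
    puDst->au64[1] = puSrc->au64[0];
}
#endif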
1322
1323
1324/**
1325 * @opcode 0x13
1326 * @opcodesub !11 mr/reg
1327 * @oppfx none
1328 * @opcpuid avx
1329 * @opgroup og_avx_simdfp_datamove
1330 * @opxcpttype 5
1331 * @optest op1=1 op2=2 -> op1=2
1332 * @optest op1=0 op2=-42 -> op1=-42
1333 */
1334FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1335{
1336 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1337 if (IEM_IS_MODRM_MEM_MODE(bRm))
1338 {
1339 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1340
1341 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1342 IEM_MC_LOCAL(uint64_t, uSrc);
1343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1344
1345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1346 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1347 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1348 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1349
1350 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1351 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1352
1353 IEM_MC_ADVANCE_RIP_AND_FINISH();
1354 IEM_MC_END();
1355 }
1356
1357 /**
1358 * @opdone
1359 * @opmnemonic udvex0f13m3
1360 * @opcode 0x13
1361 * @opcodesub 11 mr/reg
1362 * @oppfx none
1363 * @opunused immediate
1364 * @opcpuid avx
1365 * @optest ->
1366 */
1367 else
1368 IEMOP_RAISE_INVALID_OPCODE_RET();
1369}
1370
1371
1372/**
1373 * @opcode 0x13
1374 * @opcodesub !11 mr/reg
1375 * @oppfx 0x66
1376 * @opcpuid avx
1377 * @opgroup og_avx_pcksclr_datamove
1378 * @opxcpttype 5
1379 * @optest op1=1 op2=2 -> op1=2
1380 * @optest op1=0 op2=-42 -> op1=-42
1381 */
1382FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1383{
1384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1385 if (IEM_IS_MODRM_MEM_MODE(bRm))
1386 {
1387 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1388 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1389 IEM_MC_LOCAL(uint64_t, uSrc);
1390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1391
1392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1393 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1394 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1395 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1396
1397 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1398 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1399
1400 IEM_MC_ADVANCE_RIP_AND_FINISH();
1401 IEM_MC_END();
1402 }
1403
1404 /**
1405 * @opdone
1406 * @opmnemonic udvex660f13m3
1407 * @opcode 0x13
1408 * @opcodesub 11 mr/reg
1409 * @oppfx 0x66
1410 * @opunused immediate
1411 * @opcpuid avx
1412 * @optest ->
1413 */
1414 else
1415 IEMOP_RAISE_INVALID_OPCODE_RET();
1416}
1417
1418/* Opcode VEX.F3.0F 0x13 - invalid */
1419/* Opcode VEX.F2.0F 0x13 - invalid */
1420
1421/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1422FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1423{
1424 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1425 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1426 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1427}
1428
1429
1430/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1431FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1432{
1433 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1434 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1435 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1436}
1437
1438
1439/* Opcode VEX.F3.0F 0x14 - invalid */
1440/* Opcode VEX.F2.0F 0x14 - invalid */
1441
1442
1443/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1444FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1445{
1446 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1447 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1448 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1449}
1450
1451
1452/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1453FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1454{
1455 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1456 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1457 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1458}
1459
1460
1461/* Opcode VEX.F3.0F 0x15 - invalid */
1462/* Opcode VEX.F2.0F 0x15 - invalid */
1463
1464
1465FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1466{
1467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1468 if (IEM_IS_MODRM_REG_MODE(bRm))
1469 {
1470 /**
1471 * @opcode 0x16
1472 * @opcodesub 11 mr/reg
1473 * @oppfx none
1474 * @opcpuid avx
1475 * @opgroup og_avx_simdfp_datamerge
1476 * @opxcpttype 7LZ
1477 */
1478 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1479
1480 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1481 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1482
1483 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1484 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1485 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1486 IEM_GET_MODRM_RM(pVCpu, bRm),
1487 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1488
1489 IEM_MC_ADVANCE_RIP_AND_FINISH();
1490 IEM_MC_END();
1491 }
1492 else
1493 {
1494 /**
1495 * @opdone
1496 * @opcode 0x16
1497 * @opcodesub !11 mr/reg
1498 * @oppfx none
1499 * @opcpuid avx
1500 * @opgroup og_avx_simdfp_datamove
1501 * @opxcpttype 5LZ
1502 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1503 */
1504 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1505
1506 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1507 IEM_MC_LOCAL(uint64_t, uSrc);
1508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1509
1510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1511 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1512 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1513 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1514
1515 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1516 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1517 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1518 uSrc);
1519
1520 IEM_MC_ADVANCE_RIP_AND_FINISH();
1521 IEM_MC_END();
1522 }
1523}
1524
1525
1526/**
1527 * @opcode 0x16
1528 * @opcodesub !11 mr/reg
1529 * @oppfx 0x66
1530 * @opcpuid avx
1531 * @opgroup og_avx_pcksclr_datamerge
1532 * @opxcpttype 5LZ
1533 */
1534FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1535{
1536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1537 if (IEM_IS_MODRM_MEM_MODE(bRm))
1538 {
1539 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1540
1541 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1542 IEM_MC_LOCAL(uint64_t, uSrc);
1543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1544
1545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1546 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1547 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1548 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1549
1550 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1551 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1552 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1553 uSrc);
1554
1555 IEM_MC_ADVANCE_RIP_AND_FINISH();
1556 IEM_MC_END();
1557 }
1558
1559 /**
1560 * @opdone
1561 * @opmnemonic udvex660f16m3
1562 * @opcode 0x16
1563 * @opcodesub 11 mr/reg
1564 * @oppfx 0x66
1565 * @opunused immediate
1566 * @opcpuid avx
1567 * @optest ->
1568 */
1569 else
1570 IEMOP_RAISE_INVALID_OPCODE_RET();
1571}
1572
1573
1574/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1575/**
1576 * @opcode 0x16
1577 * @oppfx 0xf3
1578 * @opcpuid avx
1579 * @opgroup og_avx_pcksclr_datamove
1580 * @opxcpttype 4
1581 */
1582FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1583{
1584 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1585 Assert(pVCpu->iem.s.uVexLength <= 1);
1586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1587 if (IEM_IS_MODRM_REG_MODE(bRm))
1588 {
1589 /*
1590 * Register, register.
1591 */
1592 if (pVCpu->iem.s.uVexLength == 0)
1593 {
1594 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1595 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1596 IEM_MC_LOCAL(RTUINT128U, uSrc);
1597
1598 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1599 IEM_MC_PREPARE_AVX_USAGE();
1600
1601 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1602 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1603 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1604 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1605 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1606 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1607
1608 IEM_MC_ADVANCE_RIP_AND_FINISH();
1609 IEM_MC_END();
1610 }
1611 else
1612 {
1613 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1614 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1615 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1616 IEM_MC_PREPARE_AVX_USAGE();
1617
1618 IEM_MC_LOCAL(RTUINT256U, uSrc);
1619 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1620 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1621 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1622 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1623 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1624 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1625 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1626 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1627 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1628 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1629
1630 IEM_MC_ADVANCE_RIP_AND_FINISH();
1631 IEM_MC_END();
1632 }
1633 }
1634 else
1635 {
1636 /*
1637 * Register, memory.
1638 */
1639 if (pVCpu->iem.s.uVexLength == 0)
1640 {
1641 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1642 IEM_MC_LOCAL(RTUINT128U, uSrc);
1643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1644
1645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1646 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1647 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1648 IEM_MC_PREPARE_AVX_USAGE();
1649
1650 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1651 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1652 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1653 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1654 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1655 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1656
1657 IEM_MC_ADVANCE_RIP_AND_FINISH();
1658 IEM_MC_END();
1659 }
1660 else
1661 {
1662 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1665 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1666 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1667 IEM_MC_PREPARE_AVX_USAGE();
1668
1669 IEM_MC_LOCAL(RTUINT256U, uSrc);
1670 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1671
1672 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1673 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1674 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1675 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1676 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1677 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1678 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1679 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1680 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1681
1682 IEM_MC_ADVANCE_RIP_AND_FINISH();
1683 IEM_MC_END();
1684 }
1685 }
1686}
1687
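#if 0 /* Illustrative sketch, not used by the emulator: the odd-dword
         duplication performed by the IEM_MC_STORE_*_U32_* sequences above
         (source dwords 1,1,3,3 per 128-bit lane), as hypothetical plain C. */
static void iemVmovshdupLaneSketch(uint32_t au32Dst[4], uint32_t const au32Src[4])
{
    au32Dst[0] = au32Src[1];    /* low pair: both copies of dword 1 */
    au32Dst[1] = au32Src[1];
    au32Dst[2] = au32Src[3];    /* high pair: both copies of dword 3 */
    au32Dst[3] = au32Src[3];
}
#endif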
1688
1689/* Opcode VEX.F2.0F 0x16 - invalid */
1690
1691
1692/**
1693 * @opcode 0x17
1694 * @opcodesub !11 mr/reg
1695 * @oppfx none
1696 * @opcpuid avx
1697 * @opgroup og_avx_simdfp_datamove
1698 * @opxcpttype 5
1699 */
1700FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1701{
1702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1703 if (IEM_IS_MODRM_MEM_MODE(bRm))
1704 {
1705 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1706
1707 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1708 IEM_MC_LOCAL(uint64_t, uSrc);
1709 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1710
1711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1712 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1713 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1714 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1715
1716 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1717 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1718
1719 IEM_MC_ADVANCE_RIP_AND_FINISH();
1720 IEM_MC_END();
1721 }
1722
1723 /**
1724 * @opdone
1725 * @opmnemonic udvex0f17m3
1726 * @opcode 0x17
1727 * @opcodesub 11 mr/reg
1728 * @oppfx none
1729 * @opunused immediate
1730 * @opcpuid avx
1731 * @optest ->
1732 */
1733 else
1734 IEMOP_RAISE_INVALID_OPCODE_RET();
1735}
1736
1737
1738/**
1739 * @opcode 0x17
1740 * @opcodesub !11 mr/reg
1741 * @oppfx 0x66
1742 * @opcpuid avx
1743 * @opgroup og_avx_pcksclr_datamove
1744 * @opxcpttype 5
1745 */
1746FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1747{
1748 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1749 if (IEM_IS_MODRM_MEM_MODE(bRm))
1750 {
1751 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1752
1753 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1754 IEM_MC_LOCAL(uint64_t, uSrc);
1755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1756
1757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1758 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1759 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1760 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1761
1762 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1763 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1764
1765 IEM_MC_ADVANCE_RIP_AND_FINISH();
1766 IEM_MC_END();
1767 }
1768
1769 /**
1770 * @opdone
1771 * @opmnemonic udvex660f17m3
1772 * @opcode 0x17
1773 * @opcodesub 11 mr/reg
1774 * @oppfx 0x66
1775 * @opunused immediate
1776 * @opcpuid avx
1777 * @optest ->
1778 */
1779 else
1780 IEMOP_RAISE_INVALID_OPCODE_RET();
1781}
1782
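#if 0 /* Illustrative sketch, not used by the emulator: vmovhps/vmovhpd above
         simply store qword 1 of the source XMM register to memory; the
         a_iQWord=1 argument to IEM_MC_FETCH_YREG_U64 selects the high half. */
static void iemVmovhpsSketch(uint64_t *puMem, uint64_t const au64Xmm[2])
{
    *puMem = au64Xmm[1];    /* high qword only; the low qword is not touched */
}
#endif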
1783
1784/* Opcode VEX.F3.0F 0x17 - invalid */
1785/* Opcode VEX.F2.0F 0x17 - invalid */
1786
1787
1788/* Opcode VEX.0F 0x18 - invalid */
1789/* Opcode VEX.0F 0x19 - invalid */
1790/* Opcode VEX.0F 0x1a - invalid */
1791/* Opcode VEX.0F 0x1b - invalid */
1792/* Opcode VEX.0F 0x1c - invalid */
1793/* Opcode VEX.0F 0x1d - invalid */
1794/* Opcode VEX.0F 0x1e - invalid */
1795/* Opcode VEX.0F 0x1f - invalid */
1796
1797/* Opcode VEX.0F 0x20 - invalid */
1798/* Opcode VEX.0F 0x21 - invalid */
1799/* Opcode VEX.0F 0x22 - invalid */
1800/* Opcode VEX.0F 0x23 - invalid */
1801/* Opcode VEX.0F 0x24 - invalid */
1802/* Opcode VEX.0F 0x25 - invalid */
1803/* Opcode VEX.0F 0x26 - invalid */
1804/* Opcode VEX.0F 0x27 - invalid */
1805
1806/**
1807 * @opcode 0x28
1808 * @oppfx none
1809 * @opcpuid avx
1810 * @opgroup og_avx_pcksclr_datamove
1811 * @opxcpttype 1
1812 * @optest op1=1 op2=2 -> op1=2
1813 * @optest op1=0 op2=-42 -> op1=-42
1814 * @note Almost identical to vmovapd.
1815 */
1816FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1817{
1818 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1819 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1820 Assert(pVCpu->iem.s.uVexLength <= 1);
1821 if (IEM_IS_MODRM_REG_MODE(bRm))
1822 {
1823 /*
1824 * Register, register.
1825 */
1826 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1827 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1828
1829 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1830 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1831 if (pVCpu->iem.s.uVexLength == 0)
1832 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1833 IEM_GET_MODRM_RM(pVCpu, bRm));
1834 else
1835 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1836 IEM_GET_MODRM_RM(pVCpu, bRm));
1837 IEM_MC_ADVANCE_RIP_AND_FINISH();
1838 IEM_MC_END();
1839 }
1840 else
1841 {
1842 /*
1843 * Register, memory.
1844 */
1845 if (pVCpu->iem.s.uVexLength == 0)
1846 {
1847 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1849 IEM_MC_LOCAL(RTUINT128U, uSrc);
1850
1851 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1852 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1853 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1854 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1855
1856 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1857 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1858
1859 IEM_MC_ADVANCE_RIP_AND_FINISH();
1860 IEM_MC_END();
1861 }
1862 else
1863 {
1864 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1866 IEM_MC_LOCAL(RTUINT256U, uSrc);
1867
1868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1869 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1870 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1871 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1872
1873 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1874 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1875
1876 IEM_MC_ADVANCE_RIP_AND_FINISH();
1877 IEM_MC_END();
1878 }
1879 }
1880}
1881
1882
1883/**
1884 * @opcode 0x28
1885 * @oppfx 66
1886 * @opcpuid avx
1887 * @opgroup og_avx_pcksclr_datamove
1888 * @opxcpttype 1
1889 * @optest op1=1 op2=2 -> op1=2
1890 * @optest op1=0 op2=-42 -> op1=-42
1891 * @note Almost identical to vmovaps.
1892 */
1893FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1894{
1895 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1897 Assert(pVCpu->iem.s.uVexLength <= 1);
1898 if (IEM_IS_MODRM_REG_MODE(bRm))
1899 {
1900 /*
1901 * Register, register.
1902 */
1903 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1904 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1905
1906 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1907 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1908 if (pVCpu->iem.s.uVexLength == 0)
1909 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1910 IEM_GET_MODRM_RM(pVCpu, bRm));
1911 else
1912 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1913 IEM_GET_MODRM_RM(pVCpu, bRm));
1914 IEM_MC_ADVANCE_RIP_AND_FINISH();
1915 IEM_MC_END();
1916 }
1917 else
1918 {
1919 /*
1920 * Register, memory.
1921 */
1922 if (pVCpu->iem.s.uVexLength == 0)
1923 {
1924 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1926 IEM_MC_LOCAL(RTUINT128U, uSrc);
1927
1928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1929 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1930 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1931 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1932
1933 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1934 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1935
1936 IEM_MC_ADVANCE_RIP_AND_FINISH();
1937 IEM_MC_END();
1938 }
1939 else
1940 {
1941 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1943 IEM_MC_LOCAL(RTUINT256U, uSrc);
1944
1945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1948 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1949
1950 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1951 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1952
1953 IEM_MC_ADVANCE_RIP_AND_FINISH();
1954 IEM_MC_END();
1955 }
1956 }
1957}
1958
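#if 0 /* Illustrative sketch, not used by the emulator: VEX.128 loads like the
         vmovaps/vmovapd forms above zero everything from bit 128 up to VLMAX,
         which is what IEM_MC_STORE_YREG_U128_ZX_VLMAX models. Hypothetical
         plain C for a 256-bit register file entry: */
static void iemVmovapsXmmLoadSketch(uint64_t au64Ymm[4], uint64_t const *pau64Mem)
{
    au64Ymm[0] = pau64Mem[0];   /* the 16-byte aligned 128-bit payload */
    au64Ymm[1] = pau64Mem[1];
    au64Ymm[2] = 0;             /* bits 255:128 are zeroed, not preserved */
    au64Ymm[3] = 0;
}
#endif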
1959/**
1960 * @opmnemonic udvexf30f28
1961 * @opcode 0x28
1962 * @oppfx 0xf3
1963 * @opunused vex.modrm
1964 * @opcpuid avx
1965 * @optest ->
1966 * @opdone
1967 */
1968
1969/**
1970 * @opmnemonic udvexf20f28
1971 * @opcode 0x28
1972 * @oppfx 0xf2
1973 * @opunused vex.modrm
1974 * @opcpuid avx
1975 * @optest ->
1976 * @opdone
1977 */
1978
1979/**
1980 * @opcode 0x29
1981 * @oppfx none
1982 * @opcpuid avx
1983 * @opgroup og_avx_pcksclr_datamove
1984 * @opxcpttype 1
1985 * @optest op1=1 op2=2 -> op1=2
1986 * @optest op1=0 op2=-42 -> op1=-42
1987 * @note Almost identical to vmovapd.
1988 */
1989FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1990{
1991 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1992 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1993 Assert(pVCpu->iem.s.uVexLength <= 1);
1994 if (IEM_IS_MODRM_REG_MODE(bRm))
1995 {
1996 /*
1997 * Register, register.
1998 */
1999 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2000 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2001
2002 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2003 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2004 if (pVCpu->iem.s.uVexLength == 0)
2005 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2006 IEM_GET_MODRM_REG(pVCpu, bRm));
2007 else
2008 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2009 IEM_GET_MODRM_REG(pVCpu, bRm));
2010 IEM_MC_ADVANCE_RIP_AND_FINISH();
2011 IEM_MC_END();
2012 }
2013 else
2014 {
2015 /*
2016 * Register, memory.
2017 */
2018 if (pVCpu->iem.s.uVexLength == 0)
2019 {
2020 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2022 IEM_MC_LOCAL(RTUINT128U, uSrc);
2023
2024 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2025 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2026 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2027 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2028
2029 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2030 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2031
2032 IEM_MC_ADVANCE_RIP_AND_FINISH();
2033 IEM_MC_END();
2034 }
2035 else
2036 {
2037 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2039 IEM_MC_LOCAL(RTUINT256U, uSrc);
2040
2041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2042 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2043 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2044 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2045
2046 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2047 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2048
2049 IEM_MC_ADVANCE_RIP_AND_FINISH();
2050 IEM_MC_END();
2051 }
2052 }
2053}
2054
2055/**
2056 * @opcode 0x29
2057 * @oppfx 66
2058 * @opcpuid avx
2059 * @opgroup og_avx_pcksclr_datamove
2060 * @opxcpttype 1
2061 * @optest op1=1 op2=2 -> op1=2
2062 * @optest op1=0 op2=-42 -> op1=-42
2063 * @note Almost identical to vmovaps.
2064 */
2065FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2066{
2067 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2068 Assert(pVCpu->iem.s.uVexLength <= 1);
2069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2070 if (IEM_IS_MODRM_REG_MODE(bRm))
2071 {
2072 /*
2073 * Register, register.
2074 */
2075 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2077
2078 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2079 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2080 if (pVCpu->iem.s.uVexLength == 0)
2081 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2082 IEM_GET_MODRM_REG(pVCpu, bRm));
2083 else
2084 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2085 IEM_GET_MODRM_REG(pVCpu, bRm));
2086 IEM_MC_ADVANCE_RIP_AND_FINISH();
2087 IEM_MC_END();
2088 }
2089 else
2090 {
2091 /*
2092 * Register, memory.
2093 */
2094 if (pVCpu->iem.s.uVexLength == 0)
2095 {
2096 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2098 IEM_MC_LOCAL(RTUINT128U, uSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2102 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2104
2105 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2106 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2107
2108 IEM_MC_ADVANCE_RIP_AND_FINISH();
2109 IEM_MC_END();
2110 }
2111 else
2112 {
2113 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2115 IEM_MC_LOCAL(RTUINT256U, uSrc);
2116
2117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2118 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2119 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2120 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2121
2122 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2123 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2124
2125 IEM_MC_ADVANCE_RIP_AND_FINISH();
2126 IEM_MC_END();
2127 }
2128 }
2129}
2130
2131
2132/**
2133 * @opmnemonic udvexf30f29
2134 * @opcode 0x29
2135 * @oppfx 0xf3
2136 * @opunused vex.modrm
2137 * @opcpuid avx
2138 * @optest ->
2139 * @opdone
2140 */
2141
2142/**
2143 * @opmnemonic udvexf20f29
2144 * @opcode 0x29
2145 * @oppfx 0xf2
2146 * @opunused vex.modrm
2147 * @opcpuid avx
2148 * @optest ->
2149 * @opdone
2150 */
2151
2152
2153/** Opcode VEX.0F 0x2a - invalid */
2154/** Opcode VEX.66.0F 0x2a - invalid */
2155/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2156FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2157/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2158FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2159
2160
2161/**
2162 * @opcode 0x2b
2163 * @opcodesub !11 mr/reg
2164 * @oppfx none
2165 * @opcpuid avx
2166 * @opgroup og_avx_cachect
2167 * @opxcpttype 1
2168 * @optest op1=1 op2=2 -> op1=2
2169 * @optest op1=0 op2=-42 -> op1=-42
2170 * @note Identical implementation to vmovntpd.
2171 */
2172FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2173{
2174 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2175 Assert(pVCpu->iem.s.uVexLength <= 1);
2176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2177 if (IEM_IS_MODRM_MEM_MODE(bRm))
2178 {
2179 /*
2180 * Memory, register.
2181 */
2182 if (pVCpu->iem.s.uVexLength == 0)
2183 {
2184 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2185 IEM_MC_LOCAL(RTUINT128U, uSrc);
2186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2187
2188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2189 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2190 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2191 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2192
2193 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2194 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2195
2196 IEM_MC_ADVANCE_RIP_AND_FINISH();
2197 IEM_MC_END();
2198 }
2199 else
2200 {
2201 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2202 IEM_MC_LOCAL(RTUINT256U, uSrc);
2203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2204
2205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2206 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2207 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2208 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2209
2210 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2211 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2212
2213 IEM_MC_ADVANCE_RIP_AND_FINISH();
2214 IEM_MC_END();
2215 }
2216 }
2217 /* The register, register encoding is invalid. */
2218 else
2219 IEMOP_RAISE_INVALID_OPCODE_RET();
2220}
2221
2222/**
2223 * @opcode 0x2b
2224 * @opcodesub !11 mr/reg
2225 * @oppfx 0x66
2226 * @opcpuid avx
2227 * @opgroup og_avx_cachect
2228 * @opxcpttype 1
2229 * @optest op1=1 op2=2 -> op1=2
2230 * @optest op1=0 op2=-42 -> op1=-42
2231 * @note Identical implementation to vmovntps.
2232 */
2233FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2234{
2235 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2236 Assert(pVCpu->iem.s.uVexLength <= 1);
2237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2238 if (IEM_IS_MODRM_MEM_MODE(bRm))
2239 {
2240 /*
2241 * Memory, register.
2242 */
2243 if (pVCpu->iem.s.uVexLength == 0)
2244 {
2245 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2246 IEM_MC_LOCAL(RTUINT128U, uSrc);
2247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2248
2249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2250 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2251 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2252 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2253
2254 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2255 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2256
2257 IEM_MC_ADVANCE_RIP_AND_FINISH();
2258 IEM_MC_END();
2259 }
2260 else
2261 {
2262 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2263 IEM_MC_LOCAL(RTUINT256U, uSrc);
2264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2265
2266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2267 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2268 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2269 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2270
2271 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2272 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2273
2274 IEM_MC_ADVANCE_RIP_AND_FINISH();
2275 IEM_MC_END();
2276 }
2277 }
2278 /* The register, register encoding is invalid. */
2279 else
2280 IEMOP_RAISE_INVALID_OPCODE_RET();
2281}
2282
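/* Note: the non-temporal hint of vmovntps/vmovntpd is not modelled above; IEM
   performs an ordinary aligned store, which is architecturally fine since the
   hint only affects caching behaviour, never the stored value. */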
2283/**
2284 * @opmnemonic udvexf30f2b
2285 * @opcode 0x2b
2286 * @oppfx 0xf3
2287 * @opunused vex.modrm
2288 * @opcpuid avx
2289 * @optest ->
2290 * @opdone
2291 */
2292
2293/**
2294 * @opmnemonic udvexf20f2b
2295 * @opcode 0x2b
2296 * @oppfx 0xf2
2297 * @opunused vex.modrm
2298 * @opcpuid avx
2299 * @optest ->
2300 * @opdone
2301 */
2302
2303
2304/* Opcode VEX.0F 0x2c - invalid */
2305/* Opcode VEX.66.0F 0x2c - invalid */
2306/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2307FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2308/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2309FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2310
2311/* Opcode VEX.0F 0x2d - invalid */
2312/* Opcode VEX.66.0F 0x2d - invalid */
2313/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2314FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2315/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2316FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2317
2318
2319/**
2320 * @opcode 0x2e
2321 * @oppfx none
2322 * @opflmodify cf,pf,af,zf,sf,of
2323 * @opflclear af,sf,of
2324 */
2325FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2326{
2327 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2328 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2329 if (IEM_IS_MODRM_REG_MODE(bRm))
2330 {
2331 /*
2332 * Register, register.
2333 */
2334 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2335 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2336 IEM_MC_LOCAL(uint32_t, fEFlags);
2337 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2338 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2339 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
2340 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2341 IEM_MC_PREPARE_AVX_USAGE();
2342 IEM_MC_FETCH_EFLAGS(fEFlags);
2343 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2344 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2345 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2346 pEFlags, puSrc1, puSrc2);
2347 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2348 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2349 } IEM_MC_ELSE() {
2350 IEM_MC_COMMIT_EFLAGS(fEFlags);
2351 } IEM_MC_ENDIF();
2352
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 }
2356 else
2357 {
2358 /*
2359 * Register, memory.
2360 */
2361 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2362 IEM_MC_LOCAL(uint32_t, fEFlags);
2363 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2364 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2365 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2366 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
2367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2368
2369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2370 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2371 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2372 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2373
2374 IEM_MC_PREPARE_AVX_USAGE();
2375 IEM_MC_FETCH_EFLAGS(fEFlags);
2376 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2377 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2378 pEFlags, puSrc1, puSrc2);
2379 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2380 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2381 } IEM_MC_ELSE() {
2382 IEM_MC_COMMIT_EFLAGS(fEFlags);
2383 } IEM_MC_ENDIF();
2384
2385 IEM_MC_ADVANCE_RIP_AND_FINISH();
2386 IEM_MC_END();
2387 }
2388}
2389
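#if 0 /* Illustrative sketch, not used by the emulator: the EFLAGS convention
         produced by the (v)ucomiss worker above (the (v)comis? forms below
         use the same mapping); AF/SF/OF are always cleared. Hypothetical C: */
static uint32_t iemUcomissEFlagsSketch(float r32Src1, float r32Src2)
{
    if (r32Src1 != r32Src1 || r32Src2 != r32Src2)   /* NaN => unordered */
        return X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
    if (r32Src1 > r32Src2)
        return 0;
    if (r32Src1 < r32Src2)
        return X86_EFL_CF;
    return X86_EFL_ZF;                              /* equal */
}
#endif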
2390
2391/**
2392 * @opcode 0x2e
2393 * @oppfx 0x66
2394 * @opflmodify cf,pf,af,zf,sf,of
2395 * @opflclear af,sf,of
2396 */
2397FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2398{
2399 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2401 if (IEM_IS_MODRM_REG_MODE(bRm))
2402 {
2403 /*
2404 * Register, register.
2405 */
2406 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2407 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2408 IEM_MC_LOCAL(uint32_t, fEFlags);
2409 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2410 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2411 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
2412 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2413 IEM_MC_PREPARE_AVX_USAGE();
2414 IEM_MC_FETCH_EFLAGS(fEFlags);
2415 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2416 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2417 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2418 pEFlags, puSrc1, puSrc2);
2419 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2420 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2421 } IEM_MC_ELSE() {
2422 IEM_MC_COMMIT_EFLAGS(fEFlags);
2423 } IEM_MC_ENDIF();
2424
2425 IEM_MC_ADVANCE_RIP_AND_FINISH();
2426 IEM_MC_END();
2427 }
2428 else
2429 {
2430 /*
2431 * Register, memory.
2432 */
2433 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2434 IEM_MC_LOCAL(uint32_t, fEFlags);
2435 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2436 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2437 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2438 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
2439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2440
2441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2442 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2443 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2444 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2445
2446 IEM_MC_PREPARE_AVX_USAGE();
2447 IEM_MC_FETCH_EFLAGS(fEFlags);
2448 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2449 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2450 pEFlags, puSrc1, puSrc2);
2451 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2452 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2453 } IEM_MC_ELSE() {
2454 IEM_MC_COMMIT_EFLAGS(fEFlags);
2455 } IEM_MC_ENDIF();
2456
2457 IEM_MC_ADVANCE_RIP_AND_FINISH();
2458 IEM_MC_END();
2459 }
2460}
2461
2462
2463/* Opcode VEX.F3.0F 0x2e - invalid */
2464/* Opcode VEX.F2.0F 0x2e - invalid */
2465
2466/**
2467 * @opcode 0x2f
2468 * @oppfx none
2469 * @opflmodify cf,pf,af,zf,sf,of
2470 * @opflclear af,sf,of
2471 */
2472FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2473{
2474 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2476 if (IEM_IS_MODRM_REG_MODE(bRm))
2477 {
2478 /*
2479 * Register, register.
2480 */
2481 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2482 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2483 IEM_MC_LOCAL(uint32_t, fEFlags);
2484 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2485 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2486 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
2487 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2488 IEM_MC_PREPARE_AVX_USAGE();
2489 IEM_MC_FETCH_EFLAGS(fEFlags);
2490 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2491 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2492 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2493 pEFlags, puSrc1, puSrc2);
2494 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2495 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2496 } IEM_MC_ELSE() {
2497 IEM_MC_COMMIT_EFLAGS(fEFlags);
2498 } IEM_MC_ENDIF();
2499
2500 IEM_MC_ADVANCE_RIP_AND_FINISH();
2501 IEM_MC_END();
2502 }
2503 else
2504 {
2505 /*
2506 * Register, memory.
2507 */
2508 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2509 IEM_MC_LOCAL(uint32_t, fEFlags);
2510 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2511 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2512 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2513 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
2514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2515
2516 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2517 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2518 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2519 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2520
2521 IEM_MC_PREPARE_AVX_USAGE();
2522 IEM_MC_FETCH_EFLAGS(fEFlags);
2523 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2524 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2525 pEFlags, puSrc1, puSrc2);
2526 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2527 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2528 } IEM_MC_ELSE() {
2529 IEM_MC_COMMIT_EFLAGS(fEFlags);
2530 } IEM_MC_ENDIF();
2531
2532 IEM_MC_ADVANCE_RIP_AND_FINISH();
2533 IEM_MC_END();
2534 }
2535}
2536
2537
2538/**
2539 * @opcode 0x2f
2540 * @oppfx 0x66
2541 * @opflmodify cf,pf,af,zf,sf,of
2542 * @opflclear af,sf,of
2543 */
2544FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2545{
2546 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2547 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2548 if (IEM_IS_MODRM_REG_MODE(bRm))
2549 {
2550 /*
2551 * Register, register.
2552 */
2553 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2554 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2555 IEM_MC_LOCAL(uint32_t, fEFlags);
2556 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2557 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2558 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
2559 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2560 IEM_MC_PREPARE_AVX_USAGE();
2561 IEM_MC_FETCH_EFLAGS(fEFlags);
2562 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2563 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2564 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2565 pEFlags, puSrc1, puSrc2);
2566 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2567 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2568 } IEM_MC_ELSE() {
2569 IEM_MC_COMMIT_EFLAGS(fEFlags);
2570 } IEM_MC_ENDIF();
2571
2572 IEM_MC_ADVANCE_RIP_AND_FINISH();
2573 IEM_MC_END();
2574 }
2575 else
2576 {
2577 /*
2578 * Register, memory.
2579 */
2580 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2581 IEM_MC_LOCAL(uint32_t, fEFlags);
2582 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2583 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2584 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2585 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
2586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2587
2588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2589 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2590 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2591 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2592
2593 IEM_MC_PREPARE_AVX_USAGE();
2594 IEM_MC_FETCH_EFLAGS(fEFlags);
2595 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2596 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2597 pEFlags, puSrc1, puSrc2);
2598 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2599 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2600 } IEM_MC_ELSE() {
2601 IEM_MC_COMMIT_EFLAGS(fEFlags);
2602 } IEM_MC_ENDIF();
2603
2604 IEM_MC_ADVANCE_RIP_AND_FINISH();
2605 IEM_MC_END();
2606 }
2607}
2608
2609
2610/* Opcode VEX.F3.0F 0x2f - invalid */
2611/* Opcode VEX.F2.0F 0x2f - invalid */
2612
2613/* Opcode VEX.0F 0x30 - invalid */
2614/* Opcode VEX.0F 0x31 - invalid */
2615/* Opcode VEX.0F 0x32 - invalid */
2616/* Opcode VEX.0F 0x33 - invalid */
2617/* Opcode VEX.0F 0x34 - invalid */
2618/* Opcode VEX.0F 0x35 - invalid */
2619/* Opcode VEX.0F 0x36 - invalid */
2620/* Opcode VEX.0F 0x37 - invalid */
2621/* Opcode VEX.0F 0x38 - invalid */
2622/* Opcode VEX.0F 0x39 - invalid */
2623/* Opcode VEX.0F 0x3a - invalid */
2624/* Opcode VEX.0F 0x3b - invalid */
2625/* Opcode VEX.0F 0x3c - invalid */
2626/* Opcode VEX.0F 0x3d - invalid */
2627/* Opcode VEX.0F 0x3e - invalid */
2628/* Opcode VEX.0F 0x3f - invalid */
2629/* Opcode VEX.0F 0x40 - invalid */
2630/* Opcode VEX.0F 0x41 - invalid */
2631/* Opcode VEX.0F 0x42 - invalid */
2632/* Opcode VEX.0F 0x43 - invalid */
2633/* Opcode VEX.0F 0x44 - invalid */
2634/* Opcode VEX.0F 0x45 - invalid */
2635/* Opcode VEX.0F 0x46 - invalid */
2636/* Opcode VEX.0F 0x47 - invalid */
2637/* Opcode VEX.0F 0x48 - invalid */
2638/* Opcode VEX.0F 0x49 - invalid */
2639/* Opcode VEX.0F 0x4a - invalid */
2640/* Opcode VEX.0F 0x4b - invalid */
2641/* Opcode VEX.0F 0x4c - invalid */
2642/* Opcode VEX.0F 0x4d - invalid */
2643/* Opcode VEX.0F 0x4e - invalid */
2644/* Opcode VEX.0F 0x4f - invalid */
2645
2646
2647/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2648FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2649{
2650 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2652 if (IEM_IS_MODRM_REG_MODE(bRm))
2653 {
2654 /*
2655 * Register, register.
2656 */
2657 if (pVCpu->iem.s.uVexLength == 0)
2658 {
2659 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2660 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2661 IEM_MC_LOCAL(uint8_t, u8Dst);
2662 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2663 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2664 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2665 IEM_MC_PREPARE_AVX_USAGE();
2666 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2667 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2668 pu8Dst, puSrc);
2669 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2670 IEM_MC_ADVANCE_RIP_AND_FINISH();
2671 IEM_MC_END();
2672 }
2673 else
2674 {
2675 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2676 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2677 IEM_MC_LOCAL(uint8_t, u8Dst);
2678 IEM_MC_LOCAL(RTUINT256U, uSrc);
2679 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2680 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2681
2682 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2683 IEM_MC_PREPARE_AVX_USAGE();
2684 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2685 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2686 pu8Dst, puSrc);
2687 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2688 IEM_MC_ADVANCE_RIP_AND_FINISH();
2689 IEM_MC_END();
2690 }
2691 }
2692 /* No memory operand. */
2693 else
2694 IEMOP_RAISE_INVALID_OPCODE_RET();
2695}
2696
2697
2698/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2699FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2700{
2701 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2703 if (IEM_IS_MODRM_REG_MODE(bRm))
2704 {
2705 /*
2706 * Register, register.
2707 */
2708 if (pVCpu->iem.s.uVexLength == 0)
2709 {
2710 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2711 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2712 IEM_MC_LOCAL(uint8_t, u8Dst);
2713 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2714 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2715 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2716 IEM_MC_PREPARE_AVX_USAGE();
2717 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2718 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2719 pu8Dst, puSrc);
2720 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2721 IEM_MC_ADVANCE_RIP_AND_FINISH();
2722 IEM_MC_END();
2723 }
2724 else
2725 {
2726 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2727 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2728 IEM_MC_LOCAL(uint8_t, u8Dst);
2729 IEM_MC_LOCAL(RTUINT256U, uSrc);
2730 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2731 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2732
2733 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2734 IEM_MC_PREPARE_AVX_USAGE();
2735 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2736 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2737 pu8Dst, puSrc);
2738 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2739 IEM_MC_ADVANCE_RIP_AND_FINISH();
2740 IEM_MC_END();
2741 }
2742 }
2743 /* No memory operand. */
2744 else
2745 IEMOP_RAISE_INVALID_OPCODE_RET();
2746}
2747
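#if 0 /* Illustrative sketch, not used by the emulator: the 128-bit vmovmskps
         worker called above gathers the sign bit of each packed single into
         the low four bits of the destination. Hypothetical plain C: */
static uint8_t iemVmovmskpsU128Sketch(uint32_t const au32Src[4])
{
    uint8_t bMask = 0;
    for (unsigned i = 0; i < 4; i++)
        bMask |= (uint8_t)(((au32Src[i] >> 31) & 1) << i);  /* bit i = sign of dword i */
    return bMask;
}
#endif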
2748
2749/* Opcode VEX.F3.0F 0x50 - invalid */
2750/* Opcode VEX.F2.0F 0x50 - invalid */
2751
2752/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2753FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2754/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2755FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2756/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2757FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2758/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2759FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2760
2761/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2762FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2763/* Opcode VEX.66.0F 0x52 - invalid */
2764/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2765FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2766/* Opcode VEX.F2.0F 0x52 - invalid */
2767
2768/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2769FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2770/* Opcode VEX.66.0F 0x53 - invalid */
2771/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2772FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2773/* Opcode VEX.F2.0F 0x53 - invalid */
2774
2775
2776/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2777FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2778{
2779 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2780 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2781 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2782}
2783
2784
2785/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2786FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2787{
2788 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2789 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2790 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2791}
2792
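/* Note: the FP logical forms (vandps/vandpd above, the vandn/vor/vxor forms
   below) are plain bitwise operations and thus type agnostic, so they
   deliberately reuse the integer vpand/vpandn/vpor/vpxor worker tables. */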
2793
2794/* Opcode VEX.F3.0F 0x54 - invalid */
2795/* Opcode VEX.F2.0F 0x54 - invalid */
2796
2797
2798/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2799FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2800{
2801 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2802 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2803 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2804}
2805
2806
2807/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2808FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2809{
2810 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2811 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2812 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2813}
2814
2815
2816/* Opcode VEX.F3.0F 0x55 - invalid */
2817/* Opcode VEX.F2.0F 0x55 - invalid */
2818
2819/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2820FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2821{
2822 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2823 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2824 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2825}
2826
2827
2828/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2829FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2830{
2831 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2832 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2833 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2834}
2835
2836
2837/* Opcode VEX.F3.0F 0x56 - invalid */
2838/* Opcode VEX.F2.0F 0x56 - invalid */
2839
2840
2841/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2842FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2843{
2844 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2845 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2846 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2847}
2848
2849
2850/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2851FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2852{
2853 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2854 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2855 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2856}
2857
2858
2859/* Opcode VEX.F3.0F 0x57 - invalid */
2860/* Opcode VEX.F2.0F 0x57 - invalid */
2861
2862/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2863FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2864/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2865FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2866/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2867FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2868/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2869FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2870
2871/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2872FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2873/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2874FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2875/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2876FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2877/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2878FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2879
2880/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2881FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2882/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2883FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2884/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2885FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2886/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2887FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2888
2889/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2890FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2891/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2892FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2893/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2894FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2895/* Opcode VEX.F2.0F 0x5b - invalid */
2896
2897/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2898FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2899/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2900FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2901/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2902FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2903/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2904FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2905
2906/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2907FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2908/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2909FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2910/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2911FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2912/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2913FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2914
2915/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2916FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2917/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2918FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2919/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2920FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2921/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2922FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2923
2924/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2925FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2926/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2927FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2928/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2929FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2930/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2931FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2932
2933
2934/* Opcode VEX.0F 0x60 - invalid */
2935
2936
2937/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2938FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2939{
2940 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2941 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2942 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2943}
2944
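#if 0 /* Illustrative sketch, not used by the emulator: the low-half byte
         interleave done per 128-bit lane by the vpunpcklbw worker selected
         above, as hypothetical plain C (distinct buffers assumed). */
static void iemVpunpcklbwLaneSketch(uint8_t abDst[16], uint8_t const abSrc1[16], uint8_t const abSrc2[16])
{
    for (unsigned i = 0; i < 8; i++)
    {
        abDst[i * 2]     = abSrc1[i];   /* even bytes from the first source */
        abDst[i * 2 + 1] = abSrc2[i];   /* odd bytes from the second source */
    }
}
#endif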
2945
2946/* Opcode VEX.F3.0F 0x60 - invalid */
2947
2948
2949/* Opcode VEX.0F 0x61 - invalid */
2950
2951
2952/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2953FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2954{
2955 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2956 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2957 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2958}
2959
2960
2961/* Opcode VEX.F3.0F 0x61 - invalid */
2962
2963
2964/* Opcode VEX.0F 0x62 - invalid */
2965
2966/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2967FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2968{
2969 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2970 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2971 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2972}
2973
2974
2975/* Opcode VEX.F3.0F 0x62 - invalid */
2976
2977
2978
2979/* Opcode VEX.0F 0x63 - invalid */
2980
2981
2982/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2983FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2984{
2985 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2986 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2987 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2988}
2989
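#if 0 /* Illustrative sketch, not used by the emulator: the per-element signed
         saturation behind the vpacksswb worker selected above. */
static int8_t iemVpacksswbSatSketch(int16_t i16)
{
    if (i16 > INT8_MAX)
        return INT8_MAX;    /* clamp overflow to 0x7f */
    if (i16 < INT8_MIN)
        return INT8_MIN;    /* clamp underflow to 0x80 */
    return (int8_t)i16;
}
#endif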
2990
2991/* Opcode VEX.F3.0F 0x63 - invalid */
2992
2993/* Opcode VEX.0F 0x64 - invalid */
2994
2995
2996/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2997FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2998{
2999 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3000 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
3001 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3002}
3003
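#if 0 /* Illustrative sketch, not used by the emulator: the vpcmpgt* workers
         selected above and below write an all-ones element on true and an
         all-zero element otherwise, e.g. for the byte variant: */
static uint8_t iemVpcmpgtbElemSketch(int8_t i8Src1, int8_t i8Src2)
{
    return i8Src1 > i8Src2 ? UINT8_MAX : 0;    /* 0xff if src1 > src2 (signed) */
}
#endif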
3004
3005/* Opcode VEX.F3.0F 0x64 - invalid */
3006
3007/* Opcode VEX.0F 0x65 - invalid */
3008
3009
3010/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3011FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3012{
3013 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3014 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
3015 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3016}
3017
3018
3019/* Opcode VEX.F3.0F 0x65 - invalid */
3020
3021/* Opcode VEX.0F 0x66 - invalid */
3022
3023
3024/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3025FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3026{
3027 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3028 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3029 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3030}
3031
3032
3033/* Opcode VEX.F3.0F 0x66 - invalid */
3034
3035/* Opcode VEX.0F 0x67 - invalid */
3036
3037
3038/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3039FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3040{
3041 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3042 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3043 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3044}
3045
3046
3047/* Opcode VEX.F3.0F 0x67 - invalid */
3048
3049
3050///**
3051// * Common worker for SSE2 instructions on the form:
3052// * pxxxx xmm1, xmm2/mem128
3053// *
3054// * The 2nd operand is the second half of a register, which in the memory case
3055// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3056// * where it may read the full 128 bits or only the upper 64 bits.
3057// *
3058// * Exceptions type 4.
3059// */
3060//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3061//{
3062// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3063// if (IEM_IS_MODRM_REG_MODE(bRm))
3064// {
3065// /*
3066// * Register, register.
3067// */
3068// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3069// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3070// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3071// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3072// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3073// IEM_MC_PREPARE_SSE_USAGE();
3074// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3075// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3076// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3077// IEM_MC_ADVANCE_RIP_AND_FINISH();
3078// IEM_MC_END();
3079// }
3080// else
3081// {
3082// /*
3083// * Register, memory.
3084// */
3085// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3086// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3087// IEM_MC_LOCAL(RTUINT128U, uSrc);
3088// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3089// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3090//
3091// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3092// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3093// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3094// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3095//
3096// IEM_MC_PREPARE_SSE_USAGE();
3097// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3098// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3099//
3100// IEM_MC_ADVANCE_RIP_AND_FINISH();
3101// IEM_MC_END();
3102// }
3103// return VINF_SUCCESS;
3104//}
3105
3106
3107/* Opcode VEX.0F 0x68 - invalid */
3108
3109/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3110FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3111{
3112 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3113 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3114 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3115}
3116
3117
3118/* Opcode VEX.F3.0F 0x68 - invalid */
3119
3120
3121/* Opcode VEX.0F 0x69 - invalid */
3122
3123
3124/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3125FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3126{
3127 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3128 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3129 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3130}
3131
3132
3133/* Opcode VEX.F3.0F 0x69 - invalid */
3134
3135
3136/* Opcode VEX.0F 0x6a - invalid */
3137
3138
3139/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3140FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3141{
3142 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3143 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3144 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3145}
3146
3147
3148/* Opcode VEX.F3.0F 0x6a - invalid */
3149
3150
3151/* Opcode VEX.0F 0x6b - invalid */
3152
3153
3154/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3155FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3156{
3157 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3158 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3159 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3160}
3161
3162
3163/* Opcode VEX.F3.0F 0x6b - invalid */
3164
3165
3166/* Opcode VEX.0F 0x6c - invalid */
3167
3168
3169/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3170FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3171{
3172 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3173 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3174 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3175}
3176
3177
3178/* Opcode VEX.F3.0F 0x6c - invalid */
3179/* Opcode VEX.F2.0F 0x6c - invalid */
3180
3181
3182/* Opcode VEX.0F 0x6d - invalid */
3183
3184
3185/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3186FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3187{
3188 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3189 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3190 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3191}
3192
3193
3194/* Opcode VEX.F3.0F 0x6d - invalid */
3195
3196
3197/* Opcode VEX.0F 0x6e - invalid */
3198
3199FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3200{
3201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3202 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3203 {
3204 /**
3205 * @opcode 0x6e
3206 * @opcodesub rex.w=1
3207 * @oppfx 0x66
3208 * @opcpuid avx
3209 * @opgroup og_avx_simdint_datamov
3210 * @opxcpttype 5
3211 * @optest 64-bit / op1=1 op2=2 -> op1=2
3212 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3213 */
3214 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3215 if (IEM_IS_MODRM_REG_MODE(bRm))
3216 {
3217 /* XMM, greg64 */
3218 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3219 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3220 IEM_MC_LOCAL(uint64_t, u64Tmp);
3221
3222 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3223 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3224
3225 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3226 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3227
3228 IEM_MC_ADVANCE_RIP_AND_FINISH();
3229 IEM_MC_END();
3230 }
3231 else
3232 {
3233 /* XMM, [mem64] */
3234 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3236 IEM_MC_LOCAL(uint64_t, u64Tmp);
3237
3238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3239 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3240 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3241 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3242
3243 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3244 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3245
3246 IEM_MC_ADVANCE_RIP_AND_FINISH();
3247 IEM_MC_END();
3248 }
3249 }
3250 else
3251 {
3252 /**
3253 * @opdone
3254 * @opcode 0x6e
3255 * @opcodesub rex.w=0
3256 * @oppfx 0x66
3257 * @opcpuid avx
3258 * @opgroup og_avx_simdint_datamov
3259 * @opxcpttype 5
3260 * @opfunction iemOp_vmovd_q_Vy_Ey
3261 * @optest op1=1 op2=2 -> op1=2
3262 * @optest op1=0 op2=-42 -> op1=-42
3263 */
3264 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3265 if (IEM_IS_MODRM_REG_MODE(bRm))
3266 {
3267 /* XMM, greg32 */
3268 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3269 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3270 IEM_MC_LOCAL(uint32_t, u32Tmp);
3271
3272 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3273 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3274
3275 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3276 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3277
3278 IEM_MC_ADVANCE_RIP_AND_FINISH();
3279 IEM_MC_END();
3280 }
3281 else
3282 {
3283 /* XMM, [mem32] */
3284 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3286 IEM_MC_LOCAL(uint32_t, u32Tmp);
3287
3288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3289 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3290 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3291 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3292
3293 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3294 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3295
3296 IEM_MC_ADVANCE_RIP_AND_FINISH();
3297 IEM_MC_END();
3298 }
3299 }
3300}
3301
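#if 0 /* Illustrative sketch, not used by the emulator: vmovd/vmovq above load
         dword/qword 0 and zero the rest of the register up to VLMAX, as the
         IEM_MC_STORE_YREG_U32/U64_ZX_VLMAX names indicate. Hypothetical C: */
static void iemVmovqSketch(uint64_t au64Ymm[4], uint64_t u64Src)
{
    au64Ymm[0] = u64Src;    /* qword 0 gets the GPR or memory operand */
    au64Ymm[1] = 0;         /* qword 1 zeroed */
    au64Ymm[2] = 0;         /* the VEX encoding also clears bits 255:128 */
    au64Ymm[3] = 0;
}
#endif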
3302
3303/* Opcode VEX.F3.0F 0x6e - invalid */
3304
3305
3306/* Opcode VEX.0F 0x6f - invalid */
3307
3308/**
3309 * @opcode 0x6f
3310 * @oppfx 0x66
3311 * @opcpuid avx
3312 * @opgroup og_avx_simdint_datamove
3313 * @opxcpttype 1
3314 * @optest op1=1 op2=2 -> op1=2
3315 * @optest op1=0 op2=-42 -> op1=-42
3316 */
3317FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3318{
3319 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3320 Assert(pVCpu->iem.s.uVexLength <= 1);
3321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3322 if (IEM_IS_MODRM_REG_MODE(bRm))
3323 {
3324 /*
3325 * Register, register.
3326 */
3327 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3328 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3329
3330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3332 if (pVCpu->iem.s.uVexLength == 0)
3333 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3334 IEM_GET_MODRM_RM(pVCpu, bRm));
3335 else
3336 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3337 IEM_GET_MODRM_RM(pVCpu, bRm));
3338 IEM_MC_ADVANCE_RIP_AND_FINISH();
3339 IEM_MC_END();
3340 }
3341 else if (pVCpu->iem.s.uVexLength == 0)
3342 {
3343 /*
3344 * Register, memory128.
3345 */
3346 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3347 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3349
3350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3351 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3352 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3353 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3354
3355 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3356 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3357
3358 IEM_MC_ADVANCE_RIP_AND_FINISH();
3359 IEM_MC_END();
3360 }
3361 else
3362 {
3363 /*
3364 * Register, memory256.
3365 */
3366 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3367 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3369
3370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3371 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3372 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3373 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3374
3375 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3376 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3377
3378 IEM_MC_ADVANCE_RIP_AND_FINISH();
3379 IEM_MC_END();
3380 }
3381}
3382
3383/**
3384 * @opcode 0x6f
3385 * @oppfx 0xf3
3386 * @opcpuid avx
3387 * @opgroup og_avx_simdint_datamove
3388 * @opxcpttype 4UA
3389 * @optest op1=1 op2=2 -> op1=2
3390 * @optest op1=0 op2=-42 -> op1=-42
3391 */
3392FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3393{
3394 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3395 Assert(pVCpu->iem.s.uVexLength <= 1);
3396 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3397 if (IEM_IS_MODRM_REG_MODE(bRm))
3398 {
3399 /*
3400 * Register, register.
3401 */
3402 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3403 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3404
3405 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3406 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3407 if (pVCpu->iem.s.uVexLength == 0)
3408 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3409 IEM_GET_MODRM_RM(pVCpu, bRm));
3410 else
3411 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3412 IEM_GET_MODRM_RM(pVCpu, bRm));
3413 IEM_MC_ADVANCE_RIP_AND_FINISH();
3414 IEM_MC_END();
3415 }
3416 else if (pVCpu->iem.s.uVexLength == 0)
3417 {
3418 /*
3419 * Register, memory128.
3420 */
3421 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3422 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3424
3425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3426 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3427 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3428 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3429
3430 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3431 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3432
3433 IEM_MC_ADVANCE_RIP_AND_FINISH();
3434 IEM_MC_END();
3435 }
3436 else
3437 {
3438 /*
3439 * Register, memory256.
3440 */
3441 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3442 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3444
3445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3446 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3447 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3448 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3449
3450 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3451 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3452
3453 IEM_MC_ADVANCE_RIP_AND_FINISH();
3454 IEM_MC_END();
3455 }
3456}
3457
3458
3459/* Opcode VEX.0F 0x70 - invalid */
3460
3461
3462/**
3463 * Common worker for AVX/AVX2 instructions on the forms:
3464 * - vpxxx xmm0, xmm2/mem128, imm8
3465 * - vpxxx ymm0, ymm2/mem256, imm8
3466 *
3467 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3468 */
3469FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3470{
3471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3472 if (IEM_IS_MODRM_REG_MODE(bRm))
3473 {
3474 /*
3475 * Register, register.
3476 */
3477 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3478 if (pVCpu->iem.s.uVexLength)
3479 {
3480 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3481 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3482 IEM_MC_LOCAL(RTUINT256U, uDst);
3483 IEM_MC_LOCAL(RTUINT256U, uSrc);
3484 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3485 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3486 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3487 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3488 IEM_MC_PREPARE_AVX_USAGE();
3489 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3490 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3491 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3492 IEM_MC_ADVANCE_RIP_AND_FINISH();
3493 IEM_MC_END();
3494 }
3495 else
3496 {
3497 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3498 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3499 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3500 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3501 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3502 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3503 IEM_MC_PREPARE_AVX_USAGE();
3504 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3505 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3506 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3507 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3508 IEM_MC_ADVANCE_RIP_AND_FINISH();
3509 IEM_MC_END();
3510 }
3511 }
3512 else
3513 {
3514 /*
3515 * Register, memory.
3516 */
3517 if (pVCpu->iem.s.uVexLength)
3518 {
3519 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3520 IEM_MC_LOCAL(RTUINT256U, uDst);
3521 IEM_MC_LOCAL(RTUINT256U, uSrc);
3522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3523 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3524 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3525
3526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3527 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3528 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3529 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3530 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3531 IEM_MC_PREPARE_AVX_USAGE();
3532
3533 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3534 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3535 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3536
3537 IEM_MC_ADVANCE_RIP_AND_FINISH();
3538 IEM_MC_END();
3539 }
3540 else
3541 {
3542 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3543 IEM_MC_LOCAL(RTUINT128U, uSrc);
3544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3545 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3546 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3547
3548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3549 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3550 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3551 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3552 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3553 IEM_MC_PREPARE_AVX_USAGE();
3554
3555 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3556 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3557 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3558 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3559
3560 IEM_MC_ADVANCE_RIP_AND_FINISH();
3561 IEM_MC_END();
3562 }
3563 }
3564}
3565
3566
3567/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3568FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3569{
3570 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3571 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3572 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3573
3574}
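
/* A minimal reference sketch of the vpshufd dword selection (hypothetical
   helper, illustration only -- the real work is done by the iemAImpl
   workers selected above): */
#if 0 /* Illustration */
DECLINLINE(void) iemPshufdU128Sketch(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U const uTmp = *puSrc; /* copy first so puDst == puSrc works */
    for (unsigned i = 0; i < 4; i++) /* two-bit index per destination dword */
        puDst->au32[i] = uTmp.au32[(bImm >> (i * 2)) & 3];
    /* The VEX.256 form applies the same selection to each 128-bit lane. */
}
#endif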
3575
3576
3577/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3578FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3579{
3580 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3581 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3582 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3583
3584}
3585
3586
3587/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3588FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3589{
3590 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3591 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3592 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3593}
3594
3595
3596/**
3597 * Common worker(s) for AVX/AVX2 instructions on the forms:
3598 * - vpxxx xmm0, xmm2, imm8
3599 * - vpxxx ymm0, ymm2, imm8
3600 *
3601 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3602 */
3603FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
3604{
3605 if (IEM_IS_MODRM_REG_MODE(bRm))
3606 {
3607 /*
3608 * Register, register.
3609 */
3610 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3611 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3612 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3613 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3614 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3615 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3616 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3617 IEM_MC_PREPARE_AVX_USAGE();
3618 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3619 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3620 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3621 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
3622 IEM_MC_ADVANCE_RIP_AND_FINISH();
3623 IEM_MC_END();
3624 }
3625 /* No memory operand. */
3626 else
3627 IEMOP_RAISE_INVALID_OPCODE_RET();
3628}
3629
3630FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3631{
3632 if (IEM_IS_MODRM_REG_MODE(bRm))
3633 {
3634 /*
3635 * Register, register.
3636 */
3637 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3638 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3639 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3640 IEM_MC_LOCAL(RTUINT256U, uDst);
3641 IEM_MC_LOCAL(RTUINT256U, uSrc);
3642 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3643 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3644 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3645 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3646 IEM_MC_PREPARE_AVX_USAGE();
3647 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3648 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3649 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
3650 IEM_MC_ADVANCE_RIP_AND_FINISH();
3651 IEM_MC_END();
3652 }
3653 /* No memory operand. */
3654 else
3655 IEMOP_RAISE_INVALID_OPCODE_RET();
3656}
3657
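/*
 * Note: in these shift-by-immediate forms the operand roles differ from the
 * usual RVM encoding: the source register is ModRM.rm (Ux) and the
 * destination is the register encoded by VEX.vvvv (Hx), which is why the
 * workers above write to IEM_GET_EFFECTIVE_VVVV.  E.g. in
 * 'vpsrlw xmm1, xmm2, 3' xmm1 comes from vvvv and xmm2 from ModRM.rm.
 */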
3658
3659/* Opcode VEX.0F 0x71 11/2 - invalid. */
3660/** Opcode VEX.66.0F 0x71 11/2. */
3661FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
3662{
3663 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3664 if (pVCpu->iem.s.uVexLength)
3665 {
3666 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3667 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
3668 }
3669 else
3670 {
3671 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3672 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
3673 }
3674}
3675
3676
3677/* Opcode VEX.0F 0x71 11/4 - invalid */
3678/** Opcode VEX.66.0F 0x71 11/4. */
3679FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
3680{
3681 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3682 if (pVCpu->iem.s.uVexLength)
3683 {
3684 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3685 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
3686 }
3687 else
3688 {
3689 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3690 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
3691 }
3692}
3693
3694/* Opcode VEX.0F 0x71 11/6 - invalid */
3695
3696/** Opcode VEX.66.0F 0x71 11/6. */
3697FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
3698{
3699 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3700 if (pVCpu->iem.s.uVexLength)
3701 {
3702 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3703 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
3704 }
3705 else
3706 {
3707 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3708 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
3709 }
3710}
3711
3712
3713/**
3714 * VEX Group 12 jump table for register variant.
3715 */
3716IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3717{
3718 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3719 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3720 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3721 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3722 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3723 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3724 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3725 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3726};
3727AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3728
3729
3730/** Opcode VEX.0F 0x71. */
3731FNIEMOP_DEF(iemOp_VGrp12)
3732{
3733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3734 if (IEM_IS_MODRM_REG_MODE(bRm))
3735 /* register, register */
3736 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3737 + pVCpu->iem.s.idxPrefix], bRm);
3738 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3739}
3740
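/*
 * Note: the VEX group jump tables are indexed by (ModRM.reg * 4 + idxPrefix),
 * with idxPrefix being 0 for no prefix, 1 for 066h, 2 for 0f3h and 3 for
 * 0f2h.  So vpsrlw (VEX.66.0F 71 /2) lands at 2*4 + 1 = entry 9 above.
 */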
3741
3742/* Opcode VEX.0F 0x72 11/2 - invalid. */
3743/** Opcode VEX.66.0F 0x72 11/2. */
3744FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
3745{
3746 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3747 if (pVCpu->iem.s.uVexLength)
3748 {
3749 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3750 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
3751 }
3752 else
3753 {
3754 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3755 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
3756 }
3757}
3758
3759
3760/* Opcode VEX.0F 0x72 11/4 - invalid. */
3761/** Opcode VEX.66.0F 0x72 11/4. */
3762FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
3763{
3764 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3765 if (pVCpu->iem.s.uVexLength)
3766 {
3767 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3768 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
3769 }
3770 else
3771 {
3772 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3773 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
3774 }
3775}
3776
3777/* Opcode VEX.0F 0x72 11/6 - invalid. */
3778
3779/** Opcode VEX.66.0F 0x72 11/6. */
3780FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
3781{
3782 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3783 if (pVCpu->iem.s.uVexLength)
3784 {
3785 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3786 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
3787 }
3788 else
3789 {
3790 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3791 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
3792 }
3793}
3794
3795
3796/**
3797 * VEX Group 13 jump table for register variant.
3798 */
3799IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3800{
3801 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3802 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3803 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3804 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3805 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3806 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3807 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3808 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3809};
3810AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3811
3812/** Opcode VEX.0F 0x72. */
3813FNIEMOP_DEF(iemOp_VGrp13)
3814{
3815 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3816 if (IEM_IS_MODRM_REG_MODE(bRm))
3817 /* register, register */
3818 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3819 + pVCpu->iem.s.idxPrefix], bRm);
3820 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3821}
3822
3823
3824/* Opcode VEX.0F 0x73 11/2 - invalid. */
3825/** Opcode VEX.66.0F 0x73 11/2. */
3826FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
3827{
3828 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3829 if (pVCpu->iem.s.uVexLength)
3830 {
3831 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3832 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
3833 }
3834 else
3835 {
3836 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3837 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
3838 }
3839}
3840
3841
3842/** Opcode VEX.66.0F 0x73 11/3. */
3843FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3844
3845/* Opcode VEX.0F 0x73 11/6 - invalid. */
3846
3847/** Opcode VEX.66.0F 0x73 11/6. */
3848FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
3849{
3850 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3851 if (pVCpu->iem.s.uVexLength)
3852 {
3853 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3854 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
3855 }
3856 else
3857 {
3858 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3859 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
3860 }
3861}
3862
3863/** Opcode VEX.66.0F 0x73 11/7. */
3864FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3865
3866/**
3867 * VEX Group 14 jump table for register variant.
3868 */
3869IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3870{
3871 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3872 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3873 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3874 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3875 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3876 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3877 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3878 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3879};
3880AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3881
3882
3883/** Opcode VEX.0F 0x73. */
3884FNIEMOP_DEF(iemOp_VGrp14)
3885{
3886 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3887 if (IEM_IS_MODRM_REG_MODE(bRm))
3888 /* register, register */
3889 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3890 + pVCpu->iem.s.idxPrefix], bRm);
3891 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3892}
3893
3894
3895/* Opcode VEX.0F 0x74 - invalid */
3896
3897
3898/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3899FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3900{
3901 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3902 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
3903 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3904}
3905
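/*
 * Note: like its SSE counterpart, vpcmpeqb produces a byte mask rather than
 * setting flags: each destination byte becomes 0xff when the corresponding
 * source bytes are equal and 0x00 otherwise; the 0x75/0x76 forms below do
 * the same per word respectively dword.
 */
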
3906/* Opcode VEX.F3.0F 0x74 - invalid */
3907/* Opcode VEX.F2.0F 0x74 - invalid */
3908
3909
3910/* Opcode VEX.0F 0x75 - invalid */
3911
3912
3913/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3914FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3915{
3916 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3917 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
3918 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3919}
3920
3921
3922/* Opcode VEX.F3.0F 0x75 - invalid */
3923/* Opcode VEX.F2.0F 0x75 - invalid */
3924
3925
3926/* Opcode VEX.0F 0x76 - invalid */
3927
3928
3929/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3930FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3931{
3932 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3933 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
3934 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3935}
3936
3937
3938/* Opcode VEX.F3.0F 0x76 - invalid */
3939/* Opcode VEX.F2.0F 0x76 - invalid */
3940
3941
3942/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
3943FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
3944{
3945 Assert(pVCpu->iem.s.uVexLength <= 1);
3946 if (pVCpu->iem.s.uVexLength == 0)
3947 {
3948 /*
3949 * 128-bit: vzeroupper
3950 */
3951 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
3952 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3953
3954 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3955 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3956 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3957
3958 IEM_MC_CLEAR_YREG_128_UP(0);
3959 IEM_MC_CLEAR_YREG_128_UP(1);
3960 IEM_MC_CLEAR_YREG_128_UP(2);
3961 IEM_MC_CLEAR_YREG_128_UP(3);
3962 IEM_MC_CLEAR_YREG_128_UP(4);
3963 IEM_MC_CLEAR_YREG_128_UP(5);
3964 IEM_MC_CLEAR_YREG_128_UP(6);
3965 IEM_MC_CLEAR_YREG_128_UP(7);
3966
3967 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
3968 {
3969 IEM_MC_CLEAR_YREG_128_UP( 8);
3970 IEM_MC_CLEAR_YREG_128_UP( 9);
3971 IEM_MC_CLEAR_YREG_128_UP(10);
3972 IEM_MC_CLEAR_YREG_128_UP(11);
3973 IEM_MC_CLEAR_YREG_128_UP(12);
3974 IEM_MC_CLEAR_YREG_128_UP(13);
3975 IEM_MC_CLEAR_YREG_128_UP(14);
3976 IEM_MC_CLEAR_YREG_128_UP(15);
3977 }
3978
3979 IEM_MC_ADVANCE_RIP_AND_FINISH();
3980 IEM_MC_END();
3981 }
3982 else
3983 {
3984 /*
3985 * 256-bit: vzeroall
3986 */
3987 IEMOP_MNEMONIC(vzeroall, "vzeroall");
3988 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3989
3990 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3991 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3992 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3993
3994 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
3995 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
3996 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
3997 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
3998 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
3999 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4000 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4001 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4002 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4003
4004 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4005 {
4006 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4007 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
4008 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
4009 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
4010 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
4011 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
4012 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
4013 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
4014 }
4015
4016 IEM_MC_ADVANCE_RIP_AND_FINISH();
4017 IEM_MC_END();
4018 }
4019}
4020
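/*
 * Note: vzeroupper clears bits 255:128 of YMM0..YMM7 (YMM0..YMM15 in
 * 64-bit mode) while vzeroall zeroes the full registers; the latter is
 * implemented above by storing a zero dword with VLMAX zero extension,
 * which wipes the whole register in a single statement.
 */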
4021
4022/* Opcode VEX.66.0F 0x77 - invalid */
4023/* Opcode VEX.F3.0F 0x77 - invalid */
4024/* Opcode VEX.F2.0F 0x77 - invalid */
4025
4026/* Opcode VEX.0F 0x78 - invalid */
4027/* Opcode VEX.66.0F 0x78 - invalid */
4028/* Opcode VEX.F3.0F 0x78 - invalid */
4029/* Opcode VEX.F2.0F 0x78 - invalid */
4030
4031/* Opcode VEX.0F 0x79 - invalid */
4032/* Opcode VEX.66.0F 0x79 - invalid */
4033/* Opcode VEX.F3.0F 0x79 - invalid */
4034/* Opcode VEX.F2.0F 0x79 - invalid */
4035
4036/* Opcode VEX.0F 0x7a - invalid */
4037/* Opcode VEX.66.0F 0x7a - invalid */
4038/* Opcode VEX.F3.0F 0x7a - invalid */
4039/* Opcode VEX.F2.0F 0x7a - invalid */
4040
4041/* Opcode VEX.0F 0x7b - invalid */
4042/* Opcode VEX.66.0F 0x7b - invalid */
4043/* Opcode VEX.F3.0F 0x7b - invalid */
4044/* Opcode VEX.F2.0F 0x7b - invalid */
4045
4046/* Opcode VEX.0F 0x7c - invalid */
4047/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4048FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
4049/* Opcode VEX.F3.0F 0x7c - invalid */
4050/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4051FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
4052
4053/* Opcode VEX.0F 0x7d - invalid */
4054/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4055FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
4056/* Opcode VEX.F3.0F 0x7d - invalid */
4057/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4058FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
4059
4060
4061/* Opcode VEX.0F 0x7e - invalid */
4062
4063FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4064{
4065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4066 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4067 {
4068 /**
4069 * @opcode 0x7e
4070 * @opcodesub rex.w=1
4071 * @oppfx 0x66
4072 * @opcpuid avx
4073 * @opgroup og_avx_simdint_datamove
4074 * @opxcpttype 5
4075 * @optest 64-bit / op1=1 op2=2 -> op1=2
4076 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4077 */
4078 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4079 if (IEM_IS_MODRM_REG_MODE(bRm))
4080 {
4081 /* greg64, XMM */
4082 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4083 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4084 IEM_MC_LOCAL(uint64_t, u64Tmp);
4085
4086 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4087 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4088
4089 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4090 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4091
4092 IEM_MC_ADVANCE_RIP_AND_FINISH();
4093 IEM_MC_END();
4094 }
4095 else
4096 {
4097 /* [mem64], XMM */
4098 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4100 IEM_MC_LOCAL(uint64_t, u64Tmp);
4101
4102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4103 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4105 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4106
4107 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4108 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4109
4110 IEM_MC_ADVANCE_RIP_AND_FINISH();
4111 IEM_MC_END();
4112 }
4113 }
4114 else
4115 {
4116 /**
4117 * @opdone
4118 * @opcode 0x7e
4119 * @opcodesub rex.w=0
4120 * @oppfx 0x66
4121 * @opcpuid avx
4122 * @opgroup og_avx_simdint_datamove
4123 * @opxcpttype 5
4124 * @opfunction iemOp_vmovd_q_Ey_Vy
4125 * @optest op1=1 op2=2 -> op1=2
4126 * @optest op1=0 op2=-42 -> op1=-42
4127 */
4128 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4129 if (IEM_IS_MODRM_REG_MODE(bRm))
4130 {
4131 /* greg32, XMM */
4132 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4133 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4134 IEM_MC_LOCAL(uint32_t, u32Tmp);
4135
4136 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4137 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4138
4139 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4140 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4141
4142 IEM_MC_ADVANCE_RIP_AND_FINISH();
4143 IEM_MC_END();
4144 }
4145 else
4146 {
4147 /* [mem32], XMM */
4148 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4150 IEM_MC_LOCAL(uint32_t, u32Tmp);
4151
4152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4153 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4154 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4155 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4156
4157 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4158 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4159
4160 IEM_MC_ADVANCE_RIP_AND_FINISH();
4161 IEM_MC_END();
4162 }
4163 }
4164}
4165
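/*
 * Note: for VEX encoded instructions the W bit lives in the VEX prefix; the
 * decoder maps it onto the same IEM_OP_PRF_SIZE_REX_W flag, which is why
 * the check above selects between the 64-bit vmovq and the 32-bit vmovd
 * forms for both encodings.
 */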
4166
4167/**
4168 * @opcode 0x7e
4169 * @oppfx 0xf3
4170 * @opcpuid avx
4171 * @opgroup og_avx_pcksclr_datamove
4172 * @opxcpttype none
4173 * @optest op1=1 op2=2 -> op1=2
4174 * @optest op1=0 op2=-42 -> op1=-42
4175 */
4176FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4177{
4178 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4180 if (IEM_IS_MODRM_REG_MODE(bRm))
4181 {
4182 /*
4183 * Register, register.
4184 */
4185 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4186 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4187
4188 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4189 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4190
4191 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4192 IEM_GET_MODRM_RM(pVCpu, bRm));
4193 IEM_MC_ADVANCE_RIP_AND_FINISH();
4194 IEM_MC_END();
4195 }
4196 else
4197 {
4198 /*
4199 * Register, memory.
4200 */
4201 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4202 IEM_MC_LOCAL(uint64_t, uSrc);
4203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4204
4205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4206 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4207 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4208 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4209
4210 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4211 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4212
4213 IEM_MC_ADVANCE_RIP_AND_FINISH();
4214 IEM_MC_END();
4215 }
4216}
4217
4218/* Opcode VEX.F2.0F 0x7e - invalid */
4219
4220
4221/* Opcode VEX.0F 0x7f - invalid */
4222
4223/**
4224 * @opcode 0x7f
4225 * @oppfx 0x66
4226 * @opcpuid avx
4227 * @opgroup og_avx_simdint_datamove
4228 * @opxcpttype 1
4229 * @optest op1=1 op2=2 -> op1=2
4230 * @optest op1=0 op2=-42 -> op1=-42
4231 */
4232FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4233{
4234 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4235 Assert(pVCpu->iem.s.uVexLength <= 1);
4236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4237 if (IEM_IS_MODRM_REG_MODE(bRm))
4238 {
4239 /*
4240 * Register, register.
4241 */
4242 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4243 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4244
4245 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4246 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4247 if (pVCpu->iem.s.uVexLength == 0)
4248 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4249 IEM_GET_MODRM_REG(pVCpu, bRm));
4250 else
4251 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4252 IEM_GET_MODRM_REG(pVCpu, bRm));
4253 IEM_MC_ADVANCE_RIP_AND_FINISH();
4254 IEM_MC_END();
4255 }
4256 else if (pVCpu->iem.s.uVexLength == 0)
4257 {
4258 /*
4259 * Register, memory128.
4260 */
4261 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4262 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4264
4265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4266 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4267 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4268 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4269
4270 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4271 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4272
4273 IEM_MC_ADVANCE_RIP_AND_FINISH();
4274 IEM_MC_END();
4275 }
4276 else
4277 {
4278 /*
4279 * Register, memory256.
4280 */
4281 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4282 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4284
4285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4286 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4287 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4288 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4289
4290 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4291 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4292
4293 IEM_MC_ADVANCE_RIP_AND_FINISH();
4294 IEM_MC_END();
4295 }
4296}
4297
4298
4299/**
4300 * @opcode 0x7f
4301 * @oppfx 0xf3
4302 * @opcpuid avx
4303 * @opgroup og_avx_simdint_datamove
4304 * @opxcpttype 4UA
4305 * @optest op1=1 op2=2 -> op1=2
4306 * @optest op1=0 op2=-42 -> op1=-42
4307 */
4308FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4309{
4310 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4311 Assert(pVCpu->iem.s.uVexLength <= 1);
4312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4313 if (IEM_IS_MODRM_REG_MODE(bRm))
4314 {
4315 /*
4316 * Register, register.
4317 */
4318 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4319 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4320
4321 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4322 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4323 if (pVCpu->iem.s.uVexLength == 0)
4324 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4325 IEM_GET_MODRM_REG(pVCpu, bRm));
4326 else
4327 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4328 IEM_GET_MODRM_REG(pVCpu, bRm));
4329 IEM_MC_ADVANCE_RIP_AND_FINISH();
4330 IEM_MC_END();
4331 }
4332 else if (pVCpu->iem.s.uVexLength == 0)
4333 {
4334 /*
4335 * Register, memory128.
4336 */
4337 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4338 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4340
4341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4342 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4343 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4344 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4345
4346 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4347 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4348
4349 IEM_MC_ADVANCE_RIP_AND_FINISH();
4350 IEM_MC_END();
4351 }
4352 else
4353 {
4354 /*
4355 * Register, memory256.
4356 */
4357 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4358 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4360
4361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4362 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4363 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4364 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4365
4366 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4367 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4368
4369 IEM_MC_ADVANCE_RIP_AND_FINISH();
4370 IEM_MC_END();
4371 }
4372}
4373
4374/* Opcode VEX.F2.0F 0x7f - invalid */
4375
4376
4377/* Opcode VEX.0F 0x80 - invalid */
4378/* Opcode VEX.0F 0x81 - invalid */
4379/* Opcode VEX.0F 0x82 - invalid */
4380/* Opcode VEX.0F 0x83 - invalid */
4381/* Opcode VEX.0F 0x84 - invalid */
4382/* Opcode VEX.0F 0x85 - invalid */
4383/* Opcode VEX.0F 0x86 - invalid */
4384/* Opcode VEX.0F 0x87 - invalid */
4385/* Opcode VEX.0F 0x88 - invalid */
4386/* Opcode VEX.0F 0x89 - invalid */
4387/* Opcode VEX.0F 0x8a - invalid */
4388/* Opcode VEX.0F 0x8b - invalid */
4389/* Opcode VEX.0F 0x8c - invalid */
4390/* Opcode VEX.0F 0x8d - invalid */
4391/* Opcode VEX.0F 0x8e - invalid */
4392/* Opcode VEX.0F 0x8f - invalid */
4393/* Opcode VEX.0F 0x90 - invalid */
4394/* Opcode VEX.0F 0x91 - invalid */
4395/* Opcode VEX.0F 0x92 - invalid */
4396/* Opcode VEX.0F 0x93 - invalid */
4397/* Opcode VEX.0F 0x94 - invalid */
4398/* Opcode VEX.0F 0x95 - invalid */
4399/* Opcode VEX.0F 0x96 - invalid */
4400/* Opcode VEX.0F 0x97 - invalid */
4401/* Opcode VEX.0F 0x98 - invalid */
4402/* Opcode VEX.0F 0x99 - invalid */
4403/* Opcode VEX.0F 0x9a - invalid */
4404/* Opcode VEX.0F 0x9b - invalid */
4405/* Opcode VEX.0F 0x9c - invalid */
4406/* Opcode VEX.0F 0x9d - invalid */
4407/* Opcode VEX.0F 0x9e - invalid */
4408/* Opcode VEX.0F 0x9f - invalid */
4409/* Opcode VEX.0F 0xa0 - invalid */
4410/* Opcode VEX.0F 0xa1 - invalid */
4411/* Opcode VEX.0F 0xa2 - invalid */
4412/* Opcode VEX.0F 0xa3 - invalid */
4413/* Opcode VEX.0F 0xa4 - invalid */
4414/* Opcode VEX.0F 0xa5 - invalid */
4415/* Opcode VEX.0F 0xa6 - invalid */
4416/* Opcode VEX.0F 0xa7 - invalid */
4417/* Opcode VEX.0F 0xa8 - invalid */
4418/* Opcode VEX.0F 0xa9 - invalid */
4419/* Opcode VEX.0F 0xaa - invalid */
4420/* Opcode VEX.0F 0xab - invalid */
4421/* Opcode VEX.0F 0xac - invalid */
4422/* Opcode VEX.0F 0xad - invalid */
4423
4424
4425/* Opcode VEX.0F 0xae mem/0 - invalid. */
4426/* Opcode VEX.0F 0xae mem/1 - invalid. */
4427
4428/**
4429 * @ opmaps grp15
4430 * @ opcode !11/2
4431 * @ oppfx none
4432 * @ opcpuid sse
4433 * @ opgroup og_sse_mxcsrsm
4434 * @ opxcpttype 5
4435 * @ optest op1=0 -> mxcsr=0
4436 * @ optest op1=0x2083 -> mxcsr=0x2083
4437 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4438 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4439 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4440 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4441 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4442 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4443 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4444 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4445 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4446 */
4447FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4448//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4449//{
4450// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4451// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4452// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4453// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4454// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4455// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4456// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4457// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4458// IEM_MC_END();
4459// return VINF_SUCCESS;
4460//}
4461
4462
4463/**
4464 * @opmaps vexgrp15
4465 * @opcode !11/3
4466 * @oppfx none
4467 * @opcpuid avx
4468 * @opgroup og_avx_mxcsrsm
4469 * @opxcpttype 5
4470 * @optest mxcsr=0 -> op1=0
4471 * @optest mxcsr=0x2083 -> op1=0x2083
4472 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4473 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4474 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4475 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4476 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4477 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4478 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4479 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4480 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4481 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4482 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4483 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4484 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4485 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4486 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4487 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4488 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4489 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4490 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4491 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4492 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4493 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4494 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4495 * -> value.xcpt=0x6
4496 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4497 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4498 * APMv4 rev 3.17 page 509.
4499 * @todo Test this instruction on AMD Ryzen.
4500 */
4501FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4502{
4503 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4504 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4505 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4507 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4508 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4509 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4510 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4511 IEM_MC_END();
4512}
4513
4514/* Opcode VEX.0F 0xae mem/4 - invalid. */
4515/* Opcode VEX.0F 0xae mem/5 - invalid. */
4516/* Opcode VEX.0F 0xae mem/6 - invalid. */
4517/* Opcode VEX.0F 0xae mem/7 - invalid. */
4518
4519/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4520/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4521/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4522/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4523/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4524/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4525/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4526/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4527
4528/**
4529 * VEX Group 15 jump table for memory variant.
4530 */
4531IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4532{ /* pfx: none, 066h, 0f3h, 0f2h */
4533 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4534 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4535 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4536 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4537 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4538 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4539 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4540 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4541};
4542AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4543
4544
4545/** Opcode VEX.0F 0xae. */
4546FNIEMOP_DEF(iemOp_VGrp15)
4547{
4548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4549 if (IEM_IS_MODRM_REG_MODE(bRm))
4550 /* register, register */
4551 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4552
4553 /* memory, register */
4554 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4555 + pVCpu->iem.s.idxPrefix], bRm);
4556}
4557
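/*
 * Note: unlike legacy group 15 (fences, [f]xsave/[f]xrstor and friends),
 * the VEX encoded group only defines the two MXCSR accessors and only with
 * memory operands; all register (11b) forms are rejected above via
 * iemOp_InvalidWithRM.
 */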
4558
4559/* Opcode VEX.0F 0xaf - invalid. */
4560
4561/* Opcode VEX.0F 0xb0 - invalid. */
4562/* Opcode VEX.0F 0xb1 - invalid. */
4563/* Opcode VEX.0F 0xb2 - invalid. */
4565/* Opcode VEX.0F 0xb3 - invalid. */
4566/* Opcode VEX.0F 0xb4 - invalid. */
4567/* Opcode VEX.0F 0xb5 - invalid. */
4568/* Opcode VEX.0F 0xb6 - invalid. */
4569/* Opcode VEX.0F 0xb7 - invalid. */
4570/* Opcode VEX.0F 0xb8 - invalid. */
4571/* Opcode VEX.0F 0xb9 - invalid. */
4572/* Opcode VEX.0F 0xba - invalid. */
4573/* Opcode VEX.0F 0xbb - invalid. */
4574/* Opcode VEX.0F 0xbc - invalid. */
4575/* Opcode VEX.0F 0xbd - invalid. */
4576/* Opcode VEX.0F 0xbe - invalid. */
4577/* Opcode VEX.0F 0xbf - invalid. */
4578
4579/* Opcode VEX.0F 0xc0 - invalid. */
4580/* Opcode VEX.66.0F 0xc0 - invalid. */
4581/* Opcode VEX.F3.0F 0xc0 - invalid. */
4582/* Opcode VEX.F2.0F 0xc0 - invalid. */
4583
4584/* Opcode VEX.0F 0xc1 - invalid. */
4585/* Opcode VEX.66.0F 0xc1 - invalid. */
4586/* Opcode VEX.F3.0F 0xc1 - invalid. */
4587/* Opcode VEX.F2.0F 0xc1 - invalid. */
4588
4589/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4590FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4591/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4592FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4593/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4594FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4595/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4596FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4597
4598/* Opcode VEX.0F 0xc3 - invalid */
4599/* Opcode VEX.66.0F 0xc3 - invalid */
4600/* Opcode VEX.F3.0F 0xc3 - invalid */
4601/* Opcode VEX.F2.0F 0xc3 - invalid */
4602
4603/* Opcode VEX.0F 0xc4 - invalid */
4604
4605
4606/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4607FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4608{
4609 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4611 if (IEM_IS_MODRM_REG_MODE(bRm))
4612 {
4613 /*
4614 * Register, register.
4615 */
4616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4617 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4618 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4619 IEM_MC_LOCAL(uint16_t, uValue);
4620
4621 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4623 IEM_MC_PREPARE_AVX_USAGE();
4624
4625 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4626 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
4627 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4628 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4629 IEM_MC_ADVANCE_RIP_AND_FINISH();
4630 IEM_MC_END();
4631 }
4632 else
4633 {
4634 /*
4635 * Register, memory.
4636 */
4637 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4639 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4640 IEM_MC_LOCAL(uint16_t, uValue);
4641
4642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4643 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4644 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4645 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4646 IEM_MC_PREPARE_AVX_USAGE();
4647
4648 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4649 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4650 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4651 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4652 IEM_MC_ADVANCE_RIP_AND_FINISH();
4653 IEM_MC_END();
4654 }
4655}
4656
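/*
 * Note: the two IEM_MC_STORE_XREG_* statements above implement the insert
 * as 'copy the whole H (VEX.vvvv) operand, then overwrite one word', i.e.
 * essentially uSrc1.au16[bImm & 7] = uValue before writing uSrc1 to
 * XMM[ModRM.reg].
 */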
4657
4658/* Opcode VEX.F3.0F 0xc4 - invalid */
4659/* Opcode VEX.F2.0F 0xc4 - invalid */
4660
4661/* Opcode VEX.0F 0xc5 - invalid */
4662
4663
4664/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4665FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4666{
4667 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4669 if (IEM_IS_MODRM_REG_MODE(bRm))
4670 {
4671 /*
4672 * greg32, XMM, imm8.
4673 */
4674 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4675 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4676 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4677 IEM_MC_LOCAL(uint16_t, uValue);
4678 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4679 IEM_MC_PREPARE_AVX_USAGE();
4680 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
4681 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
4682 IEM_MC_ADVANCE_RIP_AND_FINISH();
4683 IEM_MC_END();
4684 }
4685 /* No memory operand. */
4686 else
4687 IEMOP_RAISE_INVALID_OPCODE_RET();
4688}
4689
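/*
 * Note: vpextrw is the inverse of vpinsrw above; it zero extends word
 * (bImm & 7) of the source register into a 32-bit GPR, essentially
 * 'Gd = (uint32_t)Ux.au16[bImm & 7]'.
 */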
4690
4691/* Opcode VEX.F3.0F 0xc5 - invalid */
4692/* Opcode VEX.F2.0F 0xc5 - invalid */
4693
4694
4695#define VSHUFP_X(a_Instr) \
4696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4697 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4698 { \
4699 /* \
4700 * Register, register. \
4701 */ \
4702 if (pVCpu->iem.s.uVexLength) \
4703 { \
4704 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4705 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4706 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4707 IEM_MC_LOCAL(RTUINT256U, uDst); \
4708 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4709 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4710 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4711 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4712 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4713 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4714 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4715 IEM_MC_PREPARE_AVX_USAGE(); \
4716 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4717 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4718 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4719 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4720 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4721 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4722 IEM_MC_END(); \
4723 } \
4724 else \
4725 { \
4726 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4727 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4728 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4729 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4730 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4731 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4732 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4733 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4734 IEM_MC_PREPARE_AVX_USAGE(); \
4735 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4736 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4737 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4738 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4739 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4740 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4741 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4742 IEM_MC_END(); \
4743 } \
4744 } \
4745 else \
4746 { \
4747 /* \
4748 * Register, memory. \
4749 */ \
4750 if (pVCpu->iem.s.uVexLength) \
4751 { \
4752 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4753 IEM_MC_LOCAL(RTUINT256U, uDst); \
4754 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4755 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4757 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4758 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4759 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4761 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4762 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4763 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4764 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4765 IEM_MC_PREPARE_AVX_USAGE(); \
4766 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4767 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4768 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4769 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4770 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4771 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4772 IEM_MC_END(); \
4773 } \
4774 else \
4775 { \
4776 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4777 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4779 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4780 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4781 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4783 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4784 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4785 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4786 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4787 IEM_MC_PREPARE_AVX_USAGE(); \
4788 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4789 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4790 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4791 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4792 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4793 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4795 IEM_MC_END(); \
4796 } \
4797 } \
4798 (void)0
4799
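/*
 * Note: VSHUFP_X expands to the complete instruction body so that vshufps
 * and vshufpd below can share it while invoking different assembly workers;
 * the trailing (void)0 just lets each invocation end with a semicolon.
 */
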
4800/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4801FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4802{
4803 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4804 VSHUFP_X(vshufps);
4805}
4806
4807
4808/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4809FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4810{
4811 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4812 VSHUFP_X(vshufpd);
4813}
4814#undef VSHUFP_X
4815
4816
4817/* Opcode VEX.F3.0F 0xc6 - invalid */
4818/* Opcode VEX.F2.0F 0xc6 - invalid */
4819
4820/* Opcode VEX.0F 0xc7 - invalid */
4821/* Opcode VEX.66.0F 0xc7 - invalid */
4822/* Opcode VEX.F3.0F 0xc7 - invalid */
4823/* Opcode VEX.F2.0F 0xc7 - invalid */
4824
4825/* Opcode VEX.0F 0xc8 - invalid */
4826/* Opcode VEX.0F 0xc9 - invalid */
4827/* Opcode VEX.0F 0xca - invalid */
4828/* Opcode VEX.0F 0xcb - invalid */
4829/* Opcode VEX.0F 0xcc - invalid */
4830/* Opcode VEX.0F 0xcd - invalid */
4831/* Opcode VEX.0F 0xce - invalid */
4832/* Opcode VEX.0F 0xcf - invalid */
4833
4834
4835/* Opcode VEX.0F 0xd0 - invalid */
4836/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4837FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4838/* Opcode VEX.F3.0F 0xd0 - invalid */
4839/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4840FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4841
4842/* Opcode VEX.0F 0xd1 - invalid */
4843/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
4844FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
4845{
4846 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4847 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
4848 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4849}
4850
4851/* Opcode VEX.F3.0F 0xd1 - invalid */
4852/* Opcode VEX.F2.0F 0xd1 - invalid */
4853
4854/* Opcode VEX.0F 0xd2 - invalid */
4855/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4856FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
4857{
4858 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4859 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
4860 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4861}
4862
4863/* Opcode VEX.F3.0F 0xd2 - invalid */
4864/* Opcode VEX.F2.0F 0xd2 - invalid */
4865
4866/* Opcode VEX.0F 0xd3 - invalid */
4867/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4868FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
4869{
4870 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4871 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
4872 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4873}
4874
4875/* Opcode VEX.F3.0F 0xd3 - invalid */
4876/* Opcode VEX.F2.0F 0xd3 - invalid */
4877
4878/* Opcode VEX.0F 0xd4 - invalid */
4879
4880
4881/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4882FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4883{
4884 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4885 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
4886 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4887}
4888
4889
4890/* Opcode VEX.F3.0F 0xd4 - invalid */
4891/* Opcode VEX.F2.0F 0xd4 - invalid */
4892
4893/* Opcode VEX.0F 0xd5 - invalid */
4894
4895
4896/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4897FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4898{
4899 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4900 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4901 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4902}
4903
4904
4905/* Opcode VEX.F3.0F 0xd5 - invalid */
4906/* Opcode VEX.F2.0F 0xd5 - invalid */
4907
4908/* Opcode VEX.0F 0xd6 - invalid */
4909
4910/**
4911 * @opcode 0xd6
4912 * @oppfx 0x66
4913 * @opcpuid avx
4914 * @opgroup og_avx_pcksclr_datamove
4915 * @opxcpttype none
4916 * @optest op1=-1 op2=2 -> op1=2
4917 * @optest op1=0 op2=-42 -> op1=-42
4918 */
4919FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4920{
4921 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4923 if (IEM_IS_MODRM_REG_MODE(bRm))
4924 {
4925 /*
4926 * Register, register.
4927 */
4928 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4929 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4930
4931 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4932 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4933
4934 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4935 IEM_GET_MODRM_REG(pVCpu, bRm));
4936 IEM_MC_ADVANCE_RIP_AND_FINISH();
4937 IEM_MC_END();
4938 }
4939 else
4940 {
4941 /*
4942 * Memory, register.
4943 */
4944 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4945 IEM_MC_LOCAL(uint64_t, uSrc);
4946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4947
4948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4949 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4950 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4951 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4952
4953 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4954 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4955
4956 IEM_MC_ADVANCE_RIP_AND_FINISH();
4957 IEM_MC_END();
4958 }
4959}
4960
4961/* Opcode VEX.F3.0F 0xd6 - invalid */
4962/* Opcode VEX.F2.0F 0xd6 - invalid */
4963
4964
4965/* Opcode VEX.0F 0xd7 - invalid */
4966
4967/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4968FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4969{
4970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4971 /* Docs say register only. */
4972 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4973 {
4974 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4975 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
4976 if (pVCpu->iem.s.uVexLength)
4977 {
4978 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4979 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4980 IEM_MC_ARG(uint64_t *, puDst, 0);
4981 IEM_MC_LOCAL(RTUINT256U, uSrc);
4982 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4983 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4984 IEM_MC_PREPARE_AVX_USAGE();
4985 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4986 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4987 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4988 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4989 IEM_MC_ADVANCE_RIP_AND_FINISH();
4990 IEM_MC_END();
4991 }
4992 else
4993 {
4994 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4995 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4996 IEM_MC_ARG(uint64_t *, puDst, 0);
4997 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4998 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4999 IEM_MC_PREPARE_AVX_USAGE();
5000 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5001 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5002 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
5003 IEM_MC_ADVANCE_RIP_AND_FINISH();
5004 IEM_MC_END();
5005 }
5006 }
5007 else
5008 IEMOP_RAISE_INVALID_OPCODE_RET();
5009}
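
/*
 * Illustrative note: (v)pmovmskb gathers the most significant bit of every
 * source byte into the low bits of the destination GPR and zeroes the rest,
 * which is why a plain uint64_t reference suffices above.  A hypothetical
 * sketch of the 128-bit lane math (not iemAImpl_pmovmskb_u128 itself):
 */
#if 0 /* Illustrative sketch, not part of the build. */
# include <stdint.h>

static uint64_t sketchPmovmskbU128(uint8_t const abSrc[16])
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 16; iByte++)
        fMask |= (uint64_t)(abSrc[iByte] >> 7) << iByte; /* bit 7 of each byte */
    return fMask; /* bits 63:16 stay zero, matching the GREG zero extension */
}
#endif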
5010
5011
5012/* Opcode VEX.F3.0F 0xd7 - invalid */
5013/* Opcode VEX.F2.0F 0xd7 - invalid */
5014
5015
5016/* Opcode VEX.0F 0xd8 - invalid */
5017
5018/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5019FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5020{
5021 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5022 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5023 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5024}
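
/*
 * Illustrative note: unlike plain vpsubb/vpsubw, the unsigned saturating
 * variants clamp each lane at zero instead of wrapping.  A hypothetical
 * sketch of one byte lane (assumed to match the selected AIMPL's per-lane
 * math):
 */
#if 0 /* Illustrative sketch, not part of the build. */
# include <stdint.h>

static uint8_t sketchPsubusbLane(uint8_t bMinuend, uint8_t bSubtrahend)
{
    return bMinuend > bSubtrahend ? (uint8_t)(bMinuend - bSubtrahend) : 0; /* max(a - b, 0) */
}
#endif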
5025
5026
5027/* Opcode VEX.F3.0F 0xd8 - invalid */
5028/* Opcode VEX.F2.0F 0xd8 - invalid */
5029
5030/* Opcode VEX.0F 0xd9 - invalid */
5031
5032
5033/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5034FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5035{
5036 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5037 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5038 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5039}
5040
5041
5042/* Opcode VEX.F3.0F 0xd9 - invalid */
5043/* Opcode VEX.F2.0F 0xd9 - invalid */
5044
5045/* Opcode VEX.0F 0xda - invalid */
5046
5047
5048/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
5049FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
5050{
5051 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5052 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
5053 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5054}
5055
5056
5057/* Opcode VEX.F3.0F 0xda - invalid */
5058/* Opcode VEX.F2.0F 0xda - invalid */
5059
5060/* Opcode VEX.0F 0xdb - invalid */
5061
5062
5063/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
5064FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
5065{
5066 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5067 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5068 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
5069}
5070
5071
5072/* Opcode VEX.F3.0F 0xdb - invalid */
5073/* Opcode VEX.F2.0F 0xdb - invalid */
5074
5075/* Opcode VEX.0F 0xdc - invalid */
5076
5077
5078/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
5079FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
5080{
5081 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5082 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
5083 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5084}
5085
5086
5087/* Opcode VEX.F3.0F 0xdc - invalid */
5088/* Opcode VEX.F2.0F 0xdc - invalid */
5089
5090/* Opcode VEX.0F 0xdd - invalid */
5091
5092
5093/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
5094FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
5095{
5096 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5097 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
5098 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5099}
5100
5101
5102/* Opcode VEX.F3.0F 0xdd - invalid */
5103/* Opcode VEX.F2.0F 0xdd - invalid */
5104
5105/* Opcode VEX.0F 0xde - invalid */
5106
5107
5108/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
5109FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
5110{
5111 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5112 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
5113 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5114}
5115
5116
5117/* Opcode VEX.F3.0F 0xde - invalid */
5118/* Opcode VEX.F2.0F 0xde - invalid */
5119
5120/* Opcode VEX.0F 0xdf - invalid */
5121
5122
5123/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5124FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5125{
5126 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5127 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5128 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5129}
5130
5131
5132/* Opcode VEX.F3.0F 0xdf - invalid */
5133/* Opcode VEX.F2.0F 0xdf - invalid */
5134
5135/* Opcode VEX.0F 0xe0 - invalid */
5136
5137
5138/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
5139FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
5140{
5141 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5142 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
5143 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5144}
5145
5146
5147/* Opcode VEX.F3.0F 0xe0 - invalid */
5148/* Opcode VEX.F2.0F 0xe0 - invalid */
5149
5150/* Opcode VEX.0F 0xe1 - invalid */
5151/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
5152FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
5153{
5154 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5155 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
5156 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5157}
5158
5159/* Opcode VEX.F3.0F 0xe1 - invalid */
5160/* Opcode VEX.F2.0F 0xe1 - invalid */
5161
5162/* Opcode VEX.0F 0xe2 - invalid */
5163/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
5164FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
5165{
5166 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5167 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
5168 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5169}
5170
5171/* Opcode VEX.F3.0F 0xe2 - invalid */
5172/* Opcode VEX.F2.0F 0xe2 - invalid */
5173
5174/* Opcode VEX.0F 0xe3 - invalid */
5175
5176
5177/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
5178FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
5179{
5180 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5181 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
5182 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5183}
5184
5185
5186/* Opcode VEX.F3.0F 0xe3 - invalid */
5187/* Opcode VEX.F2.0F 0xe3 - invalid */
5188
5189/* Opcode VEX.0F 0xe4 - invalid */
5190
5191
5192/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
5193FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
5194{
5195 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5196 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
5197 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5198}
5199
5200
5201/* Opcode VEX.F3.0F 0xe4 - invalid */
5202/* Opcode VEX.F2.0F 0xe4 - invalid */
5203
5204/* Opcode VEX.0F 0xe5 - invalid */
5205
5206
5207/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
5208FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
5209{
5210 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5211 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
5212 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5213}
5214
5215
5216/* Opcode VEX.F3.0F 0xe5 - invalid */
5217/* Opcode VEX.F2.0F 0xe5 - invalid */
5218
5219/* Opcode VEX.0F 0xe6 - invalid */
5220/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5221FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5222/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5223FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5224/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5225FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5226
5227
5228/* Opcode VEX.0F 0xe7 - invalid */
5229
5230/**
5231 * @opcode 0xe7
5232 * @opcodesub !11 mr/reg
5233 * @oppfx 0x66
5234 * @opcpuid avx
5235 * @opgroup og_avx_cachect
5236 * @opxcpttype 1
5237 * @optest op1=-1 op2=2 -> op1=2
5238 * @optest op1=0 op2=-42 -> op1=-42
5239 */
5240FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5241{
5242 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5243 Assert(pVCpu->iem.s.uVexLength <= 1);
5244 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5245 if (IEM_IS_MODRM_MEM_MODE(bRm))
5246 {
5247 if (pVCpu->iem.s.uVexLength == 0)
5248 {
5249 /*
5250 * 128-bit: Memory, register.
5251 */
5252 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5253 IEM_MC_LOCAL(RTUINT128U, uSrc);
5254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5255
5256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5257 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5258 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5259 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5260
5261 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5262 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5263
5264 IEM_MC_ADVANCE_RIP_AND_FINISH();
5265 IEM_MC_END();
5266 }
5267 else
5268 {
5269 /*
5270 * 256-bit: Memory, register.
5271 */
5272 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5273 IEM_MC_LOCAL(RTUINT256U, uSrc);
5274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5275
5276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5277 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5278 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5279 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5280
5281 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5282 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5283
5284 IEM_MC_ADVANCE_RIP_AND_FINISH();
5285 IEM_MC_END();
5286 }
5287 }
5288 /**
5289 * @opdone
5290 * @opmnemonic udvex660fe7reg
5291 * @opcode 0xe7
5292 * @opcodesub 11 mr/reg
5293 * @oppfx 0x66
5294 * @opunused immediate
5295 * @opcpuid avx
5296 * @optest ->
5297 */
5298 else
5299 IEMOP_RAISE_INVALID_OPCODE_RET();
5300}
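
/*
 * Illustrative note: vmovntdq is a non-temporal (cache-bypassing hint) store
 * and, unlike vmovdqu, requires natural alignment, hence the _ALIGN_SSE and
 * _ALIGN_AVX store helpers above.  A hypothetical guest-side usage sketch
 * via the standard intrinsics:
 */
#if 0 /* Illustrative sketch, not part of the build. */
# include <immintrin.h>

static void sketchGuestStreamingStore(void *pvDst16Aligned, __m128i uValue)
{
    _mm_stream_si128((__m128i *)pvDst16Aligned, uValue); /* encodes as (v)movntdq m128, xmm */
}
#endif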
5301
5302/* Opcode VEX.F3.0F 0xe7 - invalid */
5303/* Opcode VEX.F2.0F 0xe7 - invalid */
5304
5305
5306/* Opcode VEX.0F 0xe8 - invalid */
5307
5308
5309/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5310FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
5311{
5312 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5313 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
5314 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5315}
5316
5317
5318/* Opcode VEX.F3.0F 0xe8 - invalid */
5319/* Opcode VEX.F2.0F 0xe8 - invalid */
5320
5321/* Opcode VEX.0F 0xe9 - invalid */
5322
5323
5324/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5325FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
5326{
5327 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5328 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
5329 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5330}
5331
5332
5333/* Opcode VEX.F3.0F 0xe9 - invalid */
5334/* Opcode VEX.F2.0F 0xe9 - invalid */
5335
5336/* Opcode VEX.0F 0xea - invalid */
5337
5338
5339/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5340FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5341{
5342 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5343 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
5344 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5345}
5346
5347
5348/* Opcode VEX.F3.0F 0xea - invalid */
5349/* Opcode VEX.F2.0F 0xea - invalid */
5350
5351/* Opcode VEX.0F 0xeb - invalid */
5352
5353
5354/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5355FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5356{
5357 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5358 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5359 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5360}
5361
5362
5363
5364/* Opcode VEX.F3.0F 0xeb - invalid */
5365/* Opcode VEX.F2.0F 0xeb - invalid */
5366
5367/* Opcode VEX.0F 0xec - invalid */
5368
5369
5370/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5371FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
5372{
5373 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5374 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
5375 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5376}
5377
5378
5379/* Opcode VEX.F3.0F 0xec - invalid */
5380/* Opcode VEX.F2.0F 0xec - invalid */
5381
5382/* Opcode VEX.0F 0xed - invalid */
5383
5384
5385/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5386FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
5387{
5388 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5389 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
5390 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5391}
5392
5393
5394/* Opcode VEX.F3.0F 0xed - invalid */
5395/* Opcode VEX.F2.0F 0xed - invalid */
5396
5397/* Opcode VEX.0F 0xee - invalid */
5398
5399
5400/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5401FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5402{
5403 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5404 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
5405 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5406}
5407
5408
5409/* Opcode VEX.F3.0F 0xee - invalid */
5410/* Opcode VEX.F2.0F 0xee - invalid */
5411
5412
5413/* Opcode VEX.0F 0xef - invalid */
5414
5415
5416/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5417FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5418{
5419 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5420 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5421 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5422}
5423
5424
5425/* Opcode VEX.F3.0F 0xef - invalid */
5426/* Opcode VEX.F2.0F 0xef - invalid */
5427
5428/* Opcode VEX.0F 0xf0 - invalid */
5429/* Opcode VEX.66.0F 0xf0 - invalid */
5430
5431
5432/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5433FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5434{
5435 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5436 Assert(pVCpu->iem.s.uVexLength <= 1);
5437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5438 if (IEM_IS_MODRM_REG_MODE(bRm))
5439 {
5440 /*
5441 * Register, register - (not implemented, assuming it raises \#UD).
5442 */
5443 IEMOP_RAISE_INVALID_OPCODE_RET();
5444 }
5445 else if (pVCpu->iem.s.uVexLength == 0)
5446 {
5447 /*
5448 * Register, memory128.
5449 */
5450 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5451 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5453
5454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5455 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5456 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5457 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5458
5459 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5460 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5461
5462 IEM_MC_ADVANCE_RIP_AND_FINISH();
5463 IEM_MC_END();
5464 }
5465 else
5466 {
5467 /*
5468 * Register, memory256.
5469 */
5470 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5471 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5473
5474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5475 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5476 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5477 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5478
5479 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5480 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5481
5482 IEM_MC_ADVANCE_RIP_AND_FINISH();
5483 IEM_MC_END();
5484 }
5485}
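
/*
 * Illustrative note: vlddqu has no alignment restriction, which is why the
 * worker above fetches with the _NO_AC helpers rather than the aligned ones.
 * A hypothetical guest-side usage sketch via the SSE3 intrinsic:
 */
#if 0 /* Illustrative sketch, not part of the build. */
# include <pmmintrin.h>

static __m128i sketchGuestUnalignedLoad(void const *pvSrc)
{
    return _mm_lddqu_si128((__m128i const *)pvSrc); /* encodes as (v)lddqu xmm, m128 */
}
#endif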
5486
5487
5488/* Opcode VEX.0F 0xf1 - invalid */
5489/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
5490FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
5491{
5492 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5493 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
5494 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5495}
5496
5497/* Opcode VEX.F2.0F 0xf1 - invalid */
5498
5499/* Opcode VEX.0F 0xf2 - invalid */
5500/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5501FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
5502{
5503 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5504 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
5505 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5506}
5507/* Opcode VEX.F2.0F 0xf2 - invalid */
5508
5509/* Opcode VEX.0F 0xf3 - invalid */
5510/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5511FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
5512{
5513 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5514 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
5515 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5516}
5517/* Opcode VEX.F2.0F 0xf3 - invalid */
5518
5519/* Opcode VEX.0F 0xf4 - invalid */
5520
5521
5522/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
5523FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5524{
5525 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5526 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5527 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5528}
5529
5530
5531/* Opcode VEX.F2.0F 0xf4 - invalid */
5532
5533/* Opcode VEX.0F 0xf5 - invalid */
5534
5535
5536/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5537FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
5538{
5539 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5540 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
5541 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5542}
5543
5544
5545/* Opcode VEX.F2.0F 0xf5 - invalid */
5546
5547/* Opcode VEX.0F 0xf6 - invalid */
5548
5549
5550/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5551FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5552{
5553 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5554 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5555 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5556}
5557
5558
5559/* Opcode VEX.F2.0F 0xf6 - invalid */
5560
5561/* Opcode VEX.0F 0xf7 - invalid */
5562/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5563FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
5564/* Opcode VEX.F2.0F 0xf7 - invalid */
5565
5566/* Opcode VEX.0F 0xf8 - invalid */
5567
5568
5569/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5570FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5571{
5572 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5573 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
5574 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5575}
5576
5577
5578/* Opcode VEX.F2.0F 0xf8 - invalid */
5579
5580/* Opcode VEX.0F 0xf9 - invalid */
5581
5582
5583/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
5584FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
5585{
5586 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5587 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
5588 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5589}
5590
5591
5592/* Opcode VEX.F2.0F 0xf9 - invalid */
5593
5594/* Opcode VEX.0F 0xfa - invalid */
5595
5596
5597/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
5598FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
5599{
5600 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5601 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
5602 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5603}
5604
5605
5606/* Opcode VEX.F2.0F 0xfa - invalid */
5607
5608/* Opcode VEX.0F 0xfb - invalid */
5609
5610
5611/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
5612FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
5613{
5614 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5615 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
5616 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5617}
5618
5619
5620/* Opcode VEX.F2.0F 0xfb - invalid */
5621
5622/* Opcode VEX.0F 0xfc - invalid */
5623
5624
5625/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
5626FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
5627{
5628 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5629 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
5630 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5631}
5632
5633
5634/* Opcode VEX.F2.0F 0xfc - invalid */
5635
5636/* Opcode VEX.0F 0xfd - invalid */
5637
5638
5639/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
5640FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
5641{
5642 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5643 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
5644 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5645}
5646
5647
5648/* Opcode VEX.F2.0F 0xfd - invalid */
5649
5650/* Opcode VEX.0F 0xfe - invalid */
5651
5652
5653/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
5654FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
5655{
5656 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5657 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
5658 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5659}
5660
5661
5662/* Opcode VEX.F2.0F 0xfe - invalid */
5663
5664
5665/** Opcode **** 0x0f 0xff - UD0 */
5666FNIEMOP_DEF(iemOp_vud0)
5667{
5668/** @todo testcase: vud0 */
5669 IEMOP_MNEMONIC(vud0, "vud0");
5670 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
5671 {
5672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
5673 if (IEM_IS_MODRM_MEM_MODE(bRm))
5674 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
5675 }
5676 IEMOP_HLP_DONE_DECODING();
5677 IEMOP_RAISE_INVALID_OPCODE_RET();
5678}
5679
5680
5681
5682/**
5683 * VEX opcode map \#1.
5684 *
5685 * @sa g_apfnTwoByteMap
5686 */
5687const PFNIEMOP g_apfnVexMap1[] =
5688{
5689 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
5690 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
5691 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
5692 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
5693 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
5694 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
5695 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
5696 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
5697 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
5698 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
5699 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
5700 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
5701 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
5702 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
5703 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
5704 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
5705 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
5706
5707 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
5708 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
5709 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
5710 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5711 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5712 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5713 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
5714 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5715 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
5716 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
5717 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
5718 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
5719 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
5720 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
5721 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
5722 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
5723
5724 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
5725 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
5726 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
5727 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
5728 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
5729 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
5730 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
5731 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
5732 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5733 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5734 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
5735 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5736 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
5737 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
5738 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5739 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5740
5741 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
5742 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
5743 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
5744 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
5745 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
5746 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
5747 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
5748 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
5749 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5750 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5751 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5752 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5753 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5754 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5755 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5756 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5757
5758 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
5759 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
5760 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
5761 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
5762 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
5763 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
5764 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
5765 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
5766 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
5767 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
5768 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
5769 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
5770 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
5771 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
5772 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
5773 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
5774
5775 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5776 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
5777 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5778 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5779 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5780 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5781 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5782 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5783 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
5784 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
5785 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
5786 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
5787 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
5788 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
5789 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
5790 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
5791
5792 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5793 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5794 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5795 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5796 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5797 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5798 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5799 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5800 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5801 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5802 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5803 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5804 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5805 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5806 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5807 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
5808
5809 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
5810 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5811 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5812 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5813 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5814 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5815 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5816 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5817 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
5818 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
5819 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
5820 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
5821 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
5822 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
5823 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
5824 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
5825
5826 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
5827 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
5828 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
5829 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
5830 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
5831 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
5832 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
5833 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
5834 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
5835 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
5836 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
5837 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
5838 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
5839 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
5840 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
5841 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
5842
5843 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
5844 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
5845 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
5846 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
5847 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
5848 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
5849 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
5850 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
5851 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
5852 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
5853 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
5854 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
5855 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
5856 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
5857 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
5858 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
5859
5860 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5861 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5862 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5863 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5864 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5865 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5866 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5867 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5868 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5869 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5870 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
5871 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
5872 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
5873 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
5874 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
5875 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
5876
5877 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5878 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5879 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5880 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5881 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5882 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5883 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5884 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5885 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5886 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5887 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
5888 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
5889 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
5890 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
5891 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
5892 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
5893
5894 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5895 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5896 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
5897 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5898 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5899 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5900 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
5901 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5902 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5903 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5904 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
5905 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
5906 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
5907 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
5908 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
5909 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
5910
5911 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
5912 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5913 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5914 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5915 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5916 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5917 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5918 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5919 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5920 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5921 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5922 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5923 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5924 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5925 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5926 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5927
5928 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5929 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5930 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5931 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5932 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5933 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5934 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
5935 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5936 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5937 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5938 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5939 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5940 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5941 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5942 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5943 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5944
5945 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
5946 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5947 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5948 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5949 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5950 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5951 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5952 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5953 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5954 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5955 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5956 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5957 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5958 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5959 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5960 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
5961};
5962AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
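
/*
 * Illustrative note: the map stores four entries per opcode byte, one per
 * mandatory-prefix column (none, 0x66, 0xF3, 0xF2), which the AssertCompile
 * above pins down (256 * 4 == 1024).  A hypothetical lookup helper showing
 * the assumed indexing scheme:
 */
#if 0 /* Illustrative sketch, not part of the build. */
static PFNIEMOP sketchVexMap1Lookup(uint8_t bOpcode, unsigned idxPrefix)
{
    return g_apfnVexMap1[(unsigned)bOpcode * 4 + (idxPrefix & 3)]; /* idxPrefix: 0=none, 1=0x66, 2=0xF3, 3=0xF2 */
}
#endif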
5963/** @} */
5964