VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@95517

Last change on this file since 95517 was 95517, checked in by vboxsync, 3 years ago

VMM/IEM: Simplified IEMOPMEDIAF3 and IEMOPMEDIAOPTF3 function table creation, moving most of them into the functions where they are used. Exceptions are 4 tables used by multiple decoder functions. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 157.7 KB
1/* $Id: IEMAllInstructionsVexMap1.cpp.h 95517 2022-07-05 15:01:42Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name VEX Opcode Map 1
23 * @{
24 */
25
26/**
27 * Common worker for AVX/AVX2 instructions of the forms:
28 * - vpxxx xmm0, xmm1, xmm2/mem128
29 * - vpxxx ymm0, ymm1, ymm2/mem256
30 *
31 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
32 */
33FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
34{
35 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
36 if (IEM_IS_MODRM_REG_MODE(bRm))
37 {
38 /*
39 * Register, register.
40 */
41 if (pVCpu->iem.s.uVexLength)
42 {
43 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
44 IEM_MC_BEGIN(4, 3);
45 IEM_MC_LOCAL(RTUINT256U, uDst);
46 IEM_MC_LOCAL(RTUINT256U, uSrc1);
47 IEM_MC_LOCAL(RTUINT256U, uSrc2);
48 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
49 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
50 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
51 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
52 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
53 IEM_MC_PREPARE_AVX_USAGE();
54 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
55 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
56 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
57 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
58 IEM_MC_ADVANCE_RIP();
59 IEM_MC_END();
60 }
61 else
62 {
63 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
64 IEM_MC_BEGIN(4, 0);
65 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
66 IEM_MC_ARG(PRTUINT128U, puDst, 1);
67 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
68 IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
69 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
70 IEM_MC_PREPARE_AVX_USAGE();
71 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
72 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
73 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
74 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
75 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
76 IEM_MC_ADVANCE_RIP();
77 IEM_MC_END();
78 }
79 }
80 else
81 {
82 /*
83 * Register, memory.
84 */
85 if (pVCpu->iem.s.uVexLength)
86 {
87 IEM_MC_BEGIN(4, 4);
88 IEM_MC_LOCAL(RTUINT256U, uDst);
89 IEM_MC_LOCAL(RTUINT256U, uSrc1);
90 IEM_MC_LOCAL(RTUINT256U, uSrc2);
91 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
92 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
93 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
94 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
95 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
96
97 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
98 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
99 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
100 IEM_MC_PREPARE_AVX_USAGE();
101
102 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
103 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
104 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
105 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
106
107 IEM_MC_ADVANCE_RIP();
108 IEM_MC_END();
109 }
110 else
111 {
112 IEM_MC_BEGIN(4, 2);
113 IEM_MC_LOCAL(RTUINT128U, uSrc2);
114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
115 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
116 IEM_MC_ARG(PRTUINT128U, puDst, 1);
117 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
118 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);
119
120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
121 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
122 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
123 IEM_MC_PREPARE_AVX_USAGE();
124
125 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
126 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
127 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
128 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
129 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
130
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 }
134 }
135 return VINF_SUCCESS;
136}
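/*
 * Illustrative sketch, not part of the original file: how a decoder entry
 * typically defers to iemOpCommonAvxAvx2_Vx_Hx_Wx.  This mirrors the real
 * iemOp_vandps_Vps_Hps_Wps further down in this map; VPXXX is a placeholder
 * mnemonic, while the g_iemAImpl_vpand tables it borrows do exist below.
 */
#if 0 /* example only */
FNIEMOP_DEF(iemOp_vpxxx_example_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPXXX, vpxxx, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
}
#endif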
137
138
139/**
140 * Common worker for AVX/AVX2 instructions of the forms:
141 * - vpxxx xmm0, xmm1, xmm2/mem128
142 * - vpxxx ymm0, ymm1, ymm2/mem256
143 *
144 * Takes function table for function w/o implicit state parameter.
145 *
146 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
147 */
148FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
149{
150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
151 if (IEM_IS_MODRM_REG_MODE(bRm))
152 {
153 /*
154 * Register, register.
155 */
156 if (pVCpu->iem.s.uVexLength)
157 {
158 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
159 IEM_MC_BEGIN(3, 3);
160 IEM_MC_LOCAL(RTUINT256U, uDst);
161 IEM_MC_LOCAL(RTUINT256U, uSrc1);
162 IEM_MC_LOCAL(RTUINT256U, uSrc2);
163 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
164 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
165 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
166 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
167 IEM_MC_PREPARE_AVX_USAGE();
168 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
169 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
171 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
172 IEM_MC_ADVANCE_RIP();
173 IEM_MC_END();
174 }
175 else
176 {
177 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
178 IEM_MC_BEGIN(3, 0);
179 IEM_MC_ARG(PRTUINT128U, puDst, 0);
180 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
181 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
182 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
183 IEM_MC_PREPARE_AVX_USAGE();
184 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
185 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
186 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
188 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
189 IEM_MC_ADVANCE_RIP();
190 IEM_MC_END();
191 }
192 }
193 else
194 {
195 /*
196 * Register, memory.
197 */
198 if (pVCpu->iem.s.uVexLength)
199 {
200 IEM_MC_BEGIN(3, 4);
201 IEM_MC_LOCAL(RTUINT256U, uDst);
202 IEM_MC_LOCAL(RTUINT256U, uSrc1);
203 IEM_MC_LOCAL(RTUINT256U, uSrc2);
204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
205 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
206 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
207 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
208
209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
210 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
211 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
212 IEM_MC_PREPARE_AVX_USAGE();
213
214 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
215 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
217 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
218
219 IEM_MC_ADVANCE_RIP();
220 IEM_MC_END();
221 }
222 else
223 {
224 IEM_MC_BEGIN(3, 2);
225 IEM_MC_LOCAL(RTUINT128U, uSrc2);
226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
227 IEM_MC_ARG(PRTUINT128U, puDst, 0);
228 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
229 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
230
231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
232 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
233 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
234 IEM_MC_PREPARE_AVX_USAGE();
235
236 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
237 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
238 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
239 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
240 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
241
242 IEM_MC_ADVANCE_RIP();
243 IEM_MC_END();
244 }
245 }
246 return VINF_SUCCESS;
247}
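/*
 * Shape of the two table types, sketched under assumptions about the real
 * declarations in the IEM headers: IEMOPMEDIAOPTF3 workers take no implicit
 * FPU/SSE state parameter, which is why this worker uses
 * IEM_MC_CALL_VOID_AIMPL_3 where iemOpCommonAvxAvx2_Vx_Hx_Wx above uses
 * IEM_MC_CALL_AVX_AIMPL_3 plus IEM_MC_IMPLICIT_AVX_AIMPL_ARGS.
 */
#if 0 /* example only; the actual typedefs live in the IEM headers */
typedef struct IEMOPMEDIAOPTF3_SKETCH
{
    void (*pfnU128)(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2);
    void (*pfnU256)(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2);
} IEMOPMEDIAOPTF3_SKETCH;
#endif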
248
249
250/**
251 * Common worker for AVX/AVX2 instructions of the forms:
252 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
253 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
254 *
255 * The 128-bit memory version of this instruction may elect to skip fetching the
256 * lower 64 bits of the operand. We, however, do not.
257 *
258 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
259 */
260FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
261{
262 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
263}
264
265
266/**
267 * Common worker for AVX/AVX2 instructions of the forms:
268 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
269 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
270 *
271 * The 128-bit memory version of this instruction may elect to skip fetching the
272 * higher 64 bits of the operand. We, however, do not.
273 *
274 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
275 */
276FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
277{
278 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
279}
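/*
 * Reference semantics sketch (assumption, not IEM code) for a 64-bit-element
 * low unpack as handled by the 128-bit forms above: only the low qword of
 * each source contributes, which is why hardware may skip the high-half
 * fetch mentioned in the comment.
 */
#if 0 /* example only */
static void punpcklqdq_ref(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
{
    uint64_t const uLo1 = puSrc1->au64[0]; /* low qword of the first source */
    uint64_t const uLo2 = puSrc2->au64[0]; /* low qword of the second source */
    puDst->au64[0] = uLo1;
    puDst->au64[1] = uLo2;
}
#endif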
280
281
282
283/* Opcode VEX.0F 0x00 - invalid */
284/* Opcode VEX.0F 0x01 - invalid */
285/* Opcode VEX.0F 0x02 - invalid */
286/* Opcode VEX.0F 0x03 - invalid */
287/* Opcode VEX.0F 0x04 - invalid */
288/* Opcode VEX.0F 0x05 - invalid */
289/* Opcode VEX.0F 0x06 - invalid */
290/* Opcode VEX.0F 0x07 - invalid */
291/* Opcode VEX.0F 0x08 - invalid */
292/* Opcode VEX.0F 0x09 - invalid */
293/* Opcode VEX.0F 0x0a - invalid */
294
295/** Opcode VEX.0F 0x0b. */
296FNIEMOP_DEF(iemOp_vud2)
297{
298 IEMOP_MNEMONIC(vud2, "vud2");
299 return IEMOP_RAISE_INVALID_OPCODE();
300}
301
302/* Opcode VEX.0F 0x0c - invalid */
303/* Opcode VEX.0F 0x0d - invalid */
304/* Opcode VEX.0F 0x0e - invalid */
305/* Opcode VEX.0F 0x0f - invalid */
306
307
308/**
309 * @opcode 0x10
310 * @oppfx none
311 * @opcpuid avx
312 * @opgroup og_avx_simdfp_datamove
313 * @opxcpttype 4UA
314 * @optest op1=1 op2=2 -> op1=2
315 * @optest op1=0 op2=-22 -> op1=-22
316 */
317FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
318{
319 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
320 Assert(pVCpu->iem.s.uVexLength <= 1);
321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
322 if (IEM_IS_MODRM_REG_MODE(bRm))
323 {
324 /*
325 * Register, register.
326 */
327 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
328 IEM_MC_BEGIN(0, 0);
329 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
330 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
331 if (pVCpu->iem.s.uVexLength == 0)
332 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
333 IEM_GET_MODRM_RM(pVCpu, bRm));
334 else
335 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
336 IEM_GET_MODRM_RM(pVCpu, bRm));
337 IEM_MC_ADVANCE_RIP();
338 IEM_MC_END();
339 }
340 else if (pVCpu->iem.s.uVexLength == 0)
341 {
342 /*
343 * 128-bit: Register, Memory
344 */
345 IEM_MC_BEGIN(0, 2);
346 IEM_MC_LOCAL(RTUINT128U, uSrc);
347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
348
349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
350 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
352 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
353
354 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
355 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
356
357 IEM_MC_ADVANCE_RIP();
358 IEM_MC_END();
359 }
360 else
361 {
362 /*
363 * 256-bit: Register, Memory
364 */
365 IEM_MC_BEGIN(0, 2);
366 IEM_MC_LOCAL(RTUINT256U, uSrc);
367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
368
369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
370 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
371 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
372 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
373
374 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
375 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
376
377 IEM_MC_ADVANCE_RIP();
378 IEM_MC_END();
379 }
380 return VINF_SUCCESS;
381}
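/*
 * What the _ZX_VLMAX stores above amount to, as a minimal sketch (assuming
 * VLMAX is 256 bits; the real macros zero up to the CPU's actual VLMAX):
 * a VEX.128 destination write clears bits 255:128 rather than preserving them.
 */
#if 0 /* example only */
static void storeYRegU128ZxVlMax_ref(PRTUINT256U puYRegDst, PCRTUINT128U puSrc)
{
    puYRegDst->au64[0] = puSrc->au64[0];
    puYRegDst->au64[1] = puSrc->au64[1];
    puYRegDst->au64[2] = 0; /* VEX encoding: the upper lanes are zeroed, not merged */
    puYRegDst->au64[3] = 0;
}
#endif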
382
383
384/**
385 * @opcode 0x10
386 * @oppfx 0x66
387 * @opcpuid avx
388 * @opgroup og_avx_simdfp_datamove
389 * @opxcpttype 4UA
390 * @optest op1=1 op2=2 -> op1=2
391 * @optest op1=0 op2=-22 -> op1=-22
392 */
393FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
394{
395 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
396 Assert(pVCpu->iem.s.uVexLength <= 1);
397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
398 if (IEM_IS_MODRM_REG_MODE(bRm))
399 {
400 /*
401 * Register, register.
402 */
403 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
404 IEM_MC_BEGIN(0, 0);
405 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
406 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
407 if (pVCpu->iem.s.uVexLength == 0)
408 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
409 IEM_GET_MODRM_RM(pVCpu, bRm));
410 else
411 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
412 IEM_GET_MODRM_RM(pVCpu, bRm));
413 IEM_MC_ADVANCE_RIP();
414 IEM_MC_END();
415 }
416 else if (pVCpu->iem.s.uVexLength == 0)
417 {
418 /*
419 * 128-bit: Register, memory.
420 */
421 IEM_MC_BEGIN(0, 2);
422 IEM_MC_LOCAL(RTUINT128U, uSrc);
423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
424
425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
426 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
427 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
428 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
429
430 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
431 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
432
433 IEM_MC_ADVANCE_RIP();
434 IEM_MC_END();
435 }
436 else
437 {
438 /*
439 * 256-bit: Register, memory.
440 */
441 IEM_MC_BEGIN(0, 2);
442 IEM_MC_LOCAL(RTUINT256U, uSrc);
443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
444
445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
446 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
447 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
448 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
449
450 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
451 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
452
453 IEM_MC_ADVANCE_RIP();
454 IEM_MC_END();
455 }
456 return VINF_SUCCESS;
457}
458
459
460FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
461{
462 Assert(pVCpu->iem.s.uVexLength <= 1);
463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
464 if (IEM_IS_MODRM_REG_MODE(bRm))
465 {
466 /**
467 * @opcode 0x10
468 * @oppfx 0xf3
469 * @opcodesub 11 mr/reg
470 * @opcpuid avx
471 * @opgroup og_avx_simdfp_datamerge
472 * @opxcpttype 5
473 * @optest op1=1 op2=0 op3=2 -> op1=2
474 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
475 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
476 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
477 * @note HssHi refers to bits 127:32.
478 */
479 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
480 IEMOP_HLP_DONE_VEX_DECODING();
481 IEM_MC_BEGIN(0, 0);
482
483 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
484 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
485 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
486 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
487 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
488 IEM_MC_ADVANCE_RIP();
489 IEM_MC_END();
490 }
491 else
492 {
493 /**
494 * @opdone
495 * @opcode 0x10
496 * @oppfx 0xf3
497 * @opcodesub !11 mr/reg
498 * @opcpuid avx
499 * @opgroup og_avx_simdfp_datamove
500 * @opxcpttype 5
501 * @opfunction iemOp_vmovss_Vss_Hss_Wss
502 * @optest op1=1 op2=2 -> op1=2
503 * @optest op1=0 op2=-22 -> op1=-22
504 */
505 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
506 IEM_MC_BEGIN(0, 2);
507 IEM_MC_LOCAL(uint32_t, uSrc);
508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
509
510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
511 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
512 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
513 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
514
515 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
516 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
517
518 IEM_MC_ADVANCE_RIP();
519 IEM_MC_END();
520 }
521
522 return VINF_SUCCESS;
523}
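/*
 * Register-form merge sketch (assumption, matching the @optest values above):
 * VMOVSS builds the destination from bits 127:32 of VEX.vvvv and bits 31:0 of
 * the r/m register, then zero-extends to VLMAX.
 */
#if 0 /* example only */
static void vmovss_rr_ref(PRTUINT128U puDst, PCRTUINT128U puHss, PCRTUINT128U puUss)
{
    uint32_t const uLo = puUss->au32[0]; /* bits 31:0 from r/m */
    puDst->au32[1] = puHss->au32[1];     /* bits 127:32 from vvvv */
    puDst->au32[2] = puHss->au32[2];
    puDst->au32[3] = puHss->au32[3];
    puDst->au32[0] = uLo;
}
#endif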
524
525
526FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
527{
528 Assert(pVCpu->iem.s.uVexLength <= 1);
529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
530 if (IEM_IS_MODRM_REG_MODE(bRm))
531 {
532 /**
533 * @opcode 0x10
534 * @oppfx 0xf2
535 * @opcodesub 11 mr/reg
536 * @opcpuid avx
537 * @opgroup og_avx_simdfp_datamerge
538 * @opxcpttype 5
539 * @optest op1=1 op2=0 op3=2 -> op1=2
540 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
541 * @optest op1=3 op2=-1 op3=0x77 ->
542 * op1=0xffffffffffffffff0000000000000077
543 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
544 */
545 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
546 IEMOP_HLP_DONE_VEX_DECODING();
547 IEM_MC_BEGIN(0, 0);
548
549 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
550 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
551 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
552 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
553 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
554 IEM_MC_ADVANCE_RIP();
555 IEM_MC_END();
556 }
557 else
558 {
559 /**
560 * @opdone
561 * @opcode 0x10
562 * @oppfx 0xf2
563 * @opcodesub !11 mr/reg
564 * @opcpuid avx
565 * @opgroup og_avx_simdfp_datamove
566 * @opxcpttype 5
567 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
568 * @optest op1=1 op2=2 -> op1=2
569 * @optest op1=0 op2=-22 -> op1=-22
570 */
571 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
572 IEM_MC_BEGIN(0, 2);
573 IEM_MC_LOCAL(uint64_t, uSrc);
574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
575
576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
577 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
578 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
579 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
580
581 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
582 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
583
584 IEM_MC_ADVANCE_RIP();
585 IEM_MC_END();
586 }
587
588 return VINF_SUCCESS;
589}
590
591
592/**
593 * @opcode 0x11
594 * @oppfx none
595 * @opcpuid avx
596 * @opgroup og_avx_simdfp_datamove
597 * @opxcpttype 4UA
598 * @optest op1=1 op2=2 -> op1=2
599 * @optest op1=0 op2=-22 -> op1=-22
600 */
601FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
602{
603 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
604 Assert(pVCpu->iem.s.uVexLength <= 1);
605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
606 if (IEM_IS_MODRM_REG_MODE(bRm))
607 {
608 /*
609 * Register, register.
610 */
611 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
612 IEM_MC_BEGIN(0, 0);
613 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
614 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
615 if (pVCpu->iem.s.uVexLength == 0)
616 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
617 IEM_GET_MODRM_REG(pVCpu, bRm));
618 else
619 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
620 IEM_GET_MODRM_REG(pVCpu, bRm));
621 IEM_MC_ADVANCE_RIP();
622 IEM_MC_END();
623 }
624 else if (pVCpu->iem.s.uVexLength == 0)
625 {
626 /*
627 * 128-bit: Memory, register.
628 */
629 IEM_MC_BEGIN(0, 2);
630 IEM_MC_LOCAL(RTUINT128U, uSrc);
631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
632
633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
634 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
635 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
636 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
637
638 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
639 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
640
641 IEM_MC_ADVANCE_RIP();
642 IEM_MC_END();
643 }
644 else
645 {
646 /*
647 * 256-bit: Memory, register.
648 */
649 IEM_MC_BEGIN(0, 2);
650 IEM_MC_LOCAL(RTUINT256U, uSrc);
651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
652
653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
654 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
655 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
656 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
657
658 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
659 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
660
661 IEM_MC_ADVANCE_RIP();
662 IEM_MC_END();
663 }
664 return VINF_SUCCESS;
665}
666
667
668/**
669 * @opcode 0x11
670 * @oppfx 0x66
671 * @opcpuid avx
672 * @opgroup og_avx_simdfp_datamove
673 * @opxcpttype 4UA
674 * @optest op1=1 op2=2 -> op1=2
675 * @optest op1=0 op2=-22 -> op1=-22
676 */
677FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
678{
679 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
680 Assert(pVCpu->iem.s.uVexLength <= 1);
681 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
682 if (IEM_IS_MODRM_REG_MODE(bRm))
683 {
684 /*
685 * Register, register.
686 */
687 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
688 IEM_MC_BEGIN(0, 0);
689 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
690 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
691 if (pVCpu->iem.s.uVexLength == 0)
692 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
693 IEM_GET_MODRM_REG(pVCpu, bRm));
694 else
695 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
696 IEM_GET_MODRM_REG(pVCpu, bRm));
697 IEM_MC_ADVANCE_RIP();
698 IEM_MC_END();
699 }
700 else if (pVCpu->iem.s.uVexLength == 0)
701 {
702 /*
703 * 128-bit: Memory, register.
704 */
705 IEM_MC_BEGIN(0, 2);
706 IEM_MC_LOCAL(RTUINT128U, uSrc);
707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
708
709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
710 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
712 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
713
714 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
715 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
716
717 IEM_MC_ADVANCE_RIP();
718 IEM_MC_END();
719 }
720 else
721 {
722 /*
723 * 256-bit: Memory, register.
724 */
725 IEM_MC_BEGIN(0, 2);
726 IEM_MC_LOCAL(RTUINT256U, uSrc);
727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
728
729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
730 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
731 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
732 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
733
734 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
735 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
736
737 IEM_MC_ADVANCE_RIP();
738 IEM_MC_END();
739 }
740 return VINF_SUCCESS;
741}
742
743
744FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
745{
746 Assert(pVCpu->iem.s.uVexLength <= 1);
747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
748 if (IEM_IS_MODRM_REG_MODE(bRm))
749 {
750 /**
751 * @opcode 0x11
752 * @oppfx 0xf3
753 * @opcodesub 11 mr/reg
754 * @opcpuid avx
755 * @opgroup og_avx_simdfp_datamerge
756 * @opxcpttype 5
757 * @optest op1=1 op2=0 op3=2 -> op1=2
758 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
759 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
760 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
761 */
762 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
763 IEMOP_HLP_DONE_VEX_DECODING();
764 IEM_MC_BEGIN(0, 0);
765
766 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
767 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
768 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
769 IEM_GET_MODRM_REG(pVCpu, bRm),
770 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
771 IEM_MC_ADVANCE_RIP();
772 IEM_MC_END();
773 }
774 else
775 {
776 /**
777 * @opdone
778 * @opcode 0x11
779 * @oppfx 0xf3
780 * @opcodesub !11 mr/reg
781 * @opcpuid avx
782 * @opgroup og_avx_simdfp_datamove
783 * @opxcpttype 5
501 * @opfunction iemOp_vmovss_Wss_Hss_Vss
785 * @optest op1=1 op2=2 -> op1=2
786 * @optest op1=0 op2=-22 -> op1=-22
787 */
788 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
789 IEM_MC_BEGIN(0, 2);
790 IEM_MC_LOCAL(uint32_t, uSrc);
791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
792
793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
794 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
795 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
796 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
797
798 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
799 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
800
801 IEM_MC_ADVANCE_RIP();
802 IEM_MC_END();
803 }
804
805 return VINF_SUCCESS;
806}
807
808
809FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
810{
811 Assert(pVCpu->iem.s.uVexLength <= 1);
812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
813 if (IEM_IS_MODRM_REG_MODE(bRm))
814 {
815 /**
816 * @opcode 0x11
817 * @oppfx 0xf2
818 * @opcodesub 11 mr/reg
819 * @opcpuid avx
820 * @opgroup og_avx_simdfp_datamerge
821 * @opxcpttype 5
822 * @optest op1=1 op2=0 op3=2 -> op1=2
823 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
824 * @optest op1=3 op2=-1 op3=0x77 ->
825 * op1=0xffffffffffffffff0000000000000077
826 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
827 */
828 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
829 IEMOP_HLP_DONE_VEX_DECODING();
830 IEM_MC_BEGIN(0, 0);
831
832 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
833 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
834 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
835 IEM_GET_MODRM_REG(pVCpu, bRm),
836 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
837 IEM_MC_ADVANCE_RIP();
838 IEM_MC_END();
839 }
840 else
841 {
842 /**
843 * @opdone
844 * @opcode 0x11
845 * @oppfx 0xf2
846 * @opcodesub !11 mr/reg
847 * @opcpuid avx
848 * @opgroup og_avx_simdfp_datamove
849 * @opxcpttype 5
850 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
851 * @optest op1=1 op2=2 -> op1=2
852 * @optest op1=0 op2=-22 -> op1=-22
853 */
854 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
855 IEM_MC_BEGIN(0, 2);
856 IEM_MC_LOCAL(uint64_t, uSrc);
857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
858
859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
860 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
861 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
862 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
863
864 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
865 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
866
867 IEM_MC_ADVANCE_RIP();
868 IEM_MC_END();
869 }
870
871 return VINF_SUCCESS;
872}
873
874
875FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
876{
877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
878 if (IEM_IS_MODRM_REG_MODE(bRm))
879 {
880 /**
881 * @opcode 0x12
882 * @opcodesub 11 mr/reg
883 * @oppfx none
884 * @opcpuid avx
885 * @opgroup og_avx_simdfp_datamerge
886 * @opxcpttype 7LZ
887 * @optest op2=0x2200220122022203
888 * op3=0x3304330533063307
889 * -> op1=0x22002201220222033304330533063307
890 * @optest op2=-1 op3=-42 -> op1=-42
891 * @note op3 and op2 are only the 8-byte high XMM register halves.
892 */
893 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
894
895 IEMOP_HLP_DONE_VEX_DECODING_L0();
896 IEM_MC_BEGIN(0, 0);
897
898 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
899 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
900 IEM_MC_MERGE_YREG_U64HI_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
901 IEM_GET_MODRM_RM(pVCpu, bRm),
902 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
903
904 IEM_MC_ADVANCE_RIP();
905 IEM_MC_END();
906 }
907 else
908 {
909 /**
910 * @opdone
911 * @opcode 0x12
912 * @opcodesub !11 mr/reg
913 * @oppfx none
914 * @opcpuid avx
915 * @opgroup og_avx_simdfp_datamove
916 * @opxcpttype 5LZ
917 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
918 * @optest op1=1 op2=0 op3=0 -> op1=0
919 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
920 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
921 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
922 */
923 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
924
925 IEM_MC_BEGIN(0, 2);
926 IEM_MC_LOCAL(uint64_t, uSrc);
927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
928
929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
930 IEMOP_HLP_DONE_VEX_DECODING_L0();
931 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
932 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
933
934 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
935 IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
936 uSrc,
937 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
938
939 IEM_MC_ADVANCE_RIP();
940 IEM_MC_END();
941 }
942 return VINF_SUCCESS;
943}
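/*
 * VMOVHLPS register-form sketch (assumption): the destination's low qword is
 * the r/m register's high qword and the destination's high qword is the
 * VEX.vvvv register's high qword, i.e. the "8-byte high XMM register halves"
 * from the note above.
 */
#if 0 /* example only */
static void vmovhlps_ref(PRTUINT128U puDst, PCRTUINT128U puHq, PCRTUINT128U puUq)
{
    uint64_t const uLo = puUq->au64[1]; /* high half of r/m becomes low half of dst */
    uint64_t const uHi = puHq->au64[1]; /* high half of vvvv is kept */
    puDst->au64[0] = uLo;
    puDst->au64[1] = uHi;
}
#endif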
944
945
946/**
947 * @opcode 0x12
948 * @opcodesub !11 mr/reg
949 * @oppfx 0x66
950 * @opcpuid avx
951 * @opgroup og_avx_pcksclr_datamerge
952 * @opxcpttype 5LZ
953 * @optest op2=0 op3=2 -> op1=2
954 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
955 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
956 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
957 */
958FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
959{
960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
961 if (IEM_IS_MODRM_MEM_MODE(bRm))
962 {
963 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
964
965 IEM_MC_BEGIN(0, 2);
966 IEM_MC_LOCAL(uint64_t, uSrc);
967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
968
969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
970 IEMOP_HLP_DONE_VEX_DECODING_L0();
971 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
972 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
973
974 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
975 IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
976 uSrc,
977 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
978
979 IEM_MC_ADVANCE_RIP();
980 IEM_MC_END();
981 return VINF_SUCCESS;
982 }
983
984 /**
985 * @opdone
986 * @opmnemonic udvex660f12m3
987 * @opcode 0x12
988 * @opcodesub 11 mr/reg
989 * @oppfx 0x66
990 * @opunused immediate
991 * @opcpuid avx
992 * @optest ->
993 */
994 return IEMOP_RAISE_INVALID_OPCODE();
995}
996
997
998/**
999 * @opcode 0x12
1000 * @oppfx 0xf3
1001 * @opcpuid avx
1002 * @opgroup og_avx_pcksclr_datamove
1003 * @opxcpttype 4
1004 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1005 * -> op1=0x00000002000000020000000100000001
1006 * @optest vex.l==1 /
1007 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1008 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1009 */
1010FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1011{
1012 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1013 Assert(pVCpu->iem.s.uVexLength <= 1);
1014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1015 if (IEM_IS_MODRM_REG_MODE(bRm))
1016 {
1017 /*
1018 * Register, register.
1019 */
1020 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1021 if (pVCpu->iem.s.uVexLength == 0)
1022 {
1023 IEM_MC_BEGIN(2, 0);
1024 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1025 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1026
1027 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1028 IEM_MC_PREPARE_AVX_USAGE();
1029
1030 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1031 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1032 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1033 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1034
1035 IEM_MC_ADVANCE_RIP();
1036 IEM_MC_END();
1037 }
1038 else
1039 {
1040 IEM_MC_BEGIN(3, 0);
1041 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1042 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1043 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1044
1045 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1046 IEM_MC_PREPARE_AVX_USAGE();
1047 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);
1048
1049 IEM_MC_ADVANCE_RIP();
1050 IEM_MC_END();
1051 }
1052 }
1053 else
1054 {
1055 /*
1056 * Register, memory.
1057 */
1058 if (pVCpu->iem.s.uVexLength == 0)
1059 {
1060 IEM_MC_BEGIN(2, 2);
1061 IEM_MC_LOCAL(RTUINT128U, uSrc);
1062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1063 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1064 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1065
1066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1067 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1068 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1069 IEM_MC_PREPARE_AVX_USAGE();
1070
1071 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1072 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1073 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1074 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1075
1076 IEM_MC_ADVANCE_RIP();
1077 IEM_MC_END();
1078 }
1079 else
1080 {
1081 IEM_MC_BEGIN(3, 2);
1082 IEM_MC_LOCAL(RTUINT256U, uSrc);
1083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1084 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1085 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1086 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1087
1088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1089 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1090 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1091 IEM_MC_PREPARE_AVX_USAGE();
1092
1093 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1094 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);
1095
1096 IEM_MC_ADVANCE_RIP();
1097 IEM_MC_END();
1098 }
1099 }
1100 return VINF_SUCCESS;
1101}
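/*
 * VMOVSLDUP reference sketch (assumption, consistent with the @optest vectors
 * above): the even-indexed dwords of the source are duplicated into the odd
 * slots, independently per 128-bit lane for the 256-bit form.
 */
#if 0 /* example only */
static void movsldup_ref(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    uint32_t const u0 = puSrc->au32[0];
    uint32_t const u2 = puSrc->au32[2];
    puDst->au32[0] = u0;
    puDst->au32[1] = u0;
    puDst->au32[2] = u2;
    puDst->au32[3] = u2;
}
#endif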
1102
1103
1104/**
1105 * @opcode 0x12
1106 * @oppfx 0xf2
1107 * @opcpuid avx
1108 * @opgroup og_avx_pcksclr_datamove
1109 * @opxcpttype 5
1110 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1111 * -> op1=0x22222222111111112222222211111111
1112 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1113 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1114 */
1115FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1116{
1117 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1119 if (IEM_IS_MODRM_REG_MODE(bRm))
1120 {
1121 /*
1122 * Register, register.
1123 */
1124 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1125 if (pVCpu->iem.s.uVexLength == 0)
1126 {
1127 IEM_MC_BEGIN(2, 0);
1128 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1129 IEM_MC_ARG(uint64_t, uSrc, 1);
1130
1131 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1132 IEM_MC_PREPARE_AVX_USAGE();
1133
1134 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1135 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1136 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1137 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1138
1139 IEM_MC_ADVANCE_RIP();
1140 IEM_MC_END();
1141 }
1142 else
1143 {
1144 IEM_MC_BEGIN(3, 0);
1145 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1146 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1147 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1148
1149 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1150 IEM_MC_PREPARE_AVX_USAGE();
1151 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);
1152
1153 IEM_MC_ADVANCE_RIP();
1154 IEM_MC_END();
1155 }
1156 }
1157 else
1158 {
1159 /*
1160 * Register, memory.
1161 */
1162 if (pVCpu->iem.s.uVexLength == 0)
1163 {
1164 IEM_MC_BEGIN(2, 2);
1165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1166 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1167 IEM_MC_ARG(uint64_t, uSrc, 1);
1168
1169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1170 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1171 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1172 IEM_MC_PREPARE_AVX_USAGE();
1173
1174 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1175 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1176 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1177 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1178
1179 IEM_MC_ADVANCE_RIP();
1180 IEM_MC_END();
1181 }
1182 else
1183 {
1184 IEM_MC_BEGIN(3, 2);
1185 IEM_MC_LOCAL(RTUINT256U, uSrc);
1186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1187 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1188 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1189 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1190
1191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1192 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1193 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1194 IEM_MC_PREPARE_AVX_USAGE();
1195
1196 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1197 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);
1198
1199 IEM_MC_ADVANCE_RIP();
1200 IEM_MC_END();
1201 }
1202 }
1203 return VINF_SUCCESS;
1204}
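/*
 * VMOVDDUP reference sketch (assumption): the low qword is broadcast within
 * each 128-bit lane, which is why the 128-bit paths above only fetch a
 * uint64_t source.
 */
#if 0 /* example only */
static void movddup_ref(PRTUINT128U puDst, uint64_t uSrc)
{
    puDst->au64[0] = uSrc;
    puDst->au64[1] = uSrc;
}
#endif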
1205
1206
1207/**
1208 * @opcode 0x13
1209 * @opcodesub !11 mr/reg
1210 * @oppfx none
1211 * @opcpuid avx
1212 * @opgroup og_avx_simdfp_datamove
1213 * @opxcpttype 5
1214 * @optest op1=1 op2=2 -> op1=2
1215 * @optest op1=0 op2=-42 -> op1=-42
1216 */
1217FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1218{
1219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1220 if (IEM_IS_MODRM_MEM_MODE(bRm))
1221 {
1222 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1223
1224 IEM_MC_BEGIN(0, 2);
1225 IEM_MC_LOCAL(uint64_t, uSrc);
1226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1227
1228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1229 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1232
1233 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1234 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1235
1236 IEM_MC_ADVANCE_RIP();
1237 IEM_MC_END();
1238 return VINF_SUCCESS;
1239 }
1240
1241 /**
1242 * @opdone
1243 * @opmnemonic udvex0f13m3
1244 * @opcode 0x13
1245 * @opcodesub 11 mr/reg
1246 * @oppfx none
1247 * @opunused immediate
1248 * @opcpuid avx
1249 * @optest ->
1250 */
1251 return IEMOP_RAISE_INVALID_OPCODE();
1252}
1253
1254
1255/**
1256 * @opcode 0x13
1257 * @opcodesub !11 mr/reg
1258 * @oppfx 0x66
1259 * @opcpuid avx
1260 * @opgroup og_avx_pcksclr_datamove
1261 * @opxcpttype 5
1262 * @optest op1=1 op2=2 -> op1=2
1263 * @optest op1=0 op2=-42 -> op1=-42
1264 */
1265FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1266{
1267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1268 if (IEM_IS_MODRM_MEM_MODE(bRm))
1269 {
1270 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1271 IEM_MC_BEGIN(0, 2);
1272 IEM_MC_LOCAL(uint64_t, uSrc);
1273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1274
1275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1276 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1277 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1278 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1279
1280 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1281 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1282
1283 IEM_MC_ADVANCE_RIP();
1284 IEM_MC_END();
1285 return VINF_SUCCESS;
1286 }
1287
1288 /**
1289 * @opdone
1290 * @opmnemonic udvex660f13m3
1291 * @opcode 0x13
1292 * @opcodesub 11 mr/reg
1293 * @oppfx 0x66
1294 * @opunused immediate
1295 * @opcpuid avx
1296 * @optest ->
1297 */
1298 return IEMOP_RAISE_INVALID_OPCODE();
1299}
1300
1301/* Opcode VEX.F3.0F 0x13 - invalid */
1302/* Opcode VEX.F2.0F 0x13 - invalid */
1303
1304/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
1305FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1306/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1307FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1308/* Opcode VEX.F3.0F 0x14 - invalid */
1309/* Opcode VEX.F2.0F 0x14 - invalid */
1310/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1311FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1312/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1313FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1314/* Opcode VEX.F3.0F 0x15 - invalid */
1315/* Opcode VEX.F2.0F 0x15 - invalid */
1316/** Opcode VEX.0F 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
1317FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1318/** Opcode VEX.66.0F 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1319FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1320/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1321FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1322/* Opcode VEX.F2.0F 0x16 - invalid */
1323/** Opcode VEX.0F 0x17 - vmovhpsv1 Mq, Vq */
1324FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1325/** Opcode VEX.66.0F 0x17 - vmovhpdv1 Mq, Vq */
1326FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1327/* Opcode VEX.F3.0F 0x17 - invalid */
1328/* Opcode VEX.F2.0F 0x17 - invalid */
1329
1330
1331/* Opcode VEX.0F 0x18 - invalid */
1332/* Opcode VEX.0F 0x19 - invalid */
1333/* Opcode VEX.0F 0x1a - invalid */
1334/* Opcode VEX.0F 0x1b - invalid */
1335/* Opcode VEX.0F 0x1c - invalid */
1336/* Opcode VEX.0F 0x1d - invalid */
1337/* Opcode VEX.0F 0x1e - invalid */
1338/* Opcode VEX.0F 0x1f - invalid */
1339
1340/* Opcode VEX.0F 0x20 - invalid */
1341/* Opcode VEX.0F 0x21 - invalid */
1342/* Opcode VEX.0F 0x22 - invalid */
1343/* Opcode VEX.0F 0x23 - invalid */
1344/* Opcode VEX.0F 0x24 - invalid */
1345/* Opcode VEX.0F 0x25 - invalid */
1346/* Opcode VEX.0F 0x26 - invalid */
1347/* Opcode VEX.0F 0x27 - invalid */
1348
1349/**
1350 * @opcode 0x28
1351 * @oppfx none
1352 * @opcpuid avx
1353 * @opgroup og_avx_pcksclr_datamove
1354 * @opxcpttype 1
1355 * @optest op1=1 op2=2 -> op1=2
1356 * @optest op1=0 op2=-42 -> op1=-42
1357 * @note Almost identical to vmovapd.
1358 */
1359FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1360{
1361 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1363 Assert(pVCpu->iem.s.uVexLength <= 1);
1364 if (IEM_IS_MODRM_REG_MODE(bRm))
1365 {
1366 /*
1367 * Register, register.
1368 */
1369 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1370 IEM_MC_BEGIN(1, 0);
1371
1372 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1373 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1374 if (pVCpu->iem.s.uVexLength == 0)
1375 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1376 IEM_GET_MODRM_RM(pVCpu, bRm));
1377 else
1378 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1379 IEM_GET_MODRM_RM(pVCpu, bRm));
1380 IEM_MC_ADVANCE_RIP();
1381 IEM_MC_END();
1382 }
1383 else
1384 {
1385 /*
1386 * Register, memory.
1387 */
1388 if (pVCpu->iem.s.uVexLength == 0)
1389 {
1390 IEM_MC_BEGIN(0, 2);
1391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1392 IEM_MC_LOCAL(RTUINT128U, uSrc);
1393
1394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1395 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1396 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1397 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1398
1399 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1400 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1401
1402 IEM_MC_ADVANCE_RIP();
1403 IEM_MC_END();
1404 }
1405 else
1406 {
1407 IEM_MC_BEGIN(0, 2);
1408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1409 IEM_MC_LOCAL(RTUINT256U, uSrc);
1410
1411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1412 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1413 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1414 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1415
1416 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1417 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1418
1419 IEM_MC_ADVANCE_RIP();
1420 IEM_MC_END();
1421 }
1422 }
1423 return VINF_SUCCESS;
1424}
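/*
 * Alignment sketch (an assumption about the macro semantics): unlike the
 * vmovups path, the _ALIGN_SSE/_ALIGN_AVX fetches above fault on misaligned
 * effective addresses.  The check boils down to something like this
 * hypothetical helper, with cbAlign = 16 for VEX.128 and 32 for VEX.256:
 */
#if 0 /* example only; not a real IEM helper */
static bool iemExampleIsAlignmentFault(RTGCPTR GCPtrEff, uint32_t cbAlign)
{
    return (GCPtrEff & (cbAlign - 1)) != 0; /* misaligned: raise #GP(0) */
}
#endif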
1425
1426
1427/**
1428 * @opcode 0x28
1429 * @oppfx 66
1430 * @opcpuid avx
1431 * @opgroup og_avx_pcksclr_datamove
1432 * @opxcpttype 1
1433 * @optest op1=1 op2=2 -> op1=2
1434 * @optest op1=0 op2=-42 -> op1=-42
1435 * @note Almost identical to vmovaps.
1436 */
1437FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1438{
1439 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1441 Assert(pVCpu->iem.s.uVexLength <= 1);
1442 if (IEM_IS_MODRM_REG_MODE(bRm))
1443 {
1444 /*
1445 * Register, register.
1446 */
1447 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1448 IEM_MC_BEGIN(1, 0);
1449
1450 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1451 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1452 if (pVCpu->iem.s.uVexLength == 0)
1453 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1454 IEM_GET_MODRM_RM(pVCpu, bRm));
1455 else
1456 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1457 IEM_GET_MODRM_RM(pVCpu, bRm));
1458 IEM_MC_ADVANCE_RIP();
1459 IEM_MC_END();
1460 }
1461 else
1462 {
1463 /*
1464 * Register, memory.
1465 */
1466 if (pVCpu->iem.s.uVexLength == 0)
1467 {
1468 IEM_MC_BEGIN(0, 2);
1469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1470 IEM_MC_LOCAL(RTUINT128U, uSrc);
1471
1472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1473 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1474 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1475 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1476
1477 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1478 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1479
1480 IEM_MC_ADVANCE_RIP();
1481 IEM_MC_END();
1482 }
1483 else
1484 {
1485 IEM_MC_BEGIN(0, 2);
1486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1487 IEM_MC_LOCAL(RTUINT256U, uSrc);
1488
1489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1490 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1491 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1492 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1493
1494 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1495 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1496
1497 IEM_MC_ADVANCE_RIP();
1498 IEM_MC_END();
1499 }
1500 }
1501 return VINF_SUCCESS;
1502}
1503
1504/**
1505 * @opmnemonic udvexf30f28
1506 * @opcode 0x28
1507 * @oppfx 0xf3
1508 * @opunused vex.modrm
1509 * @opcpuid avx
1510 * @optest ->
1511 * @opdone
1512 */
1513
1514/**
1515 * @opmnemonic udvexf20f28
1516 * @opcode 0x28
1517 * @oppfx 0xf2
1518 * @opunused vex.modrm
1519 * @opcpuid avx
1520 * @optest ->
1521 * @opdone
1522 */
1523
1524/**
1525 * @opcode 0x29
1526 * @oppfx none
1527 * @opcpuid avx
1528 * @opgroup og_avx_pcksclr_datamove
1529 * @opxcpttype 1
1530 * @optest op1=1 op2=2 -> op1=2
1531 * @optest op1=0 op2=-42 -> op1=-42
1532 * @note Almost identical to vmovapd.
1533 */
1534FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1535{
1536 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1538 Assert(pVCpu->iem.s.uVexLength <= 1);
1539 if (IEM_IS_MODRM_REG_MODE(bRm))
1540 {
1541 /*
1542 * Register, register.
1543 */
1544 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1545 IEM_MC_BEGIN(1, 0);
1546
1547 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1548 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1549 if (pVCpu->iem.s.uVexLength == 0)
1550 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1551 IEM_GET_MODRM_REG(pVCpu, bRm));
1552 else
1553 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1554 IEM_GET_MODRM_REG(pVCpu, bRm));
1555 IEM_MC_ADVANCE_RIP();
1556 IEM_MC_END();
1557 }
1558 else
1559 {
1560 /*
1561 * Register, memory.
1562 */
1563 if (pVCpu->iem.s.uVexLength == 0)
1564 {
1565 IEM_MC_BEGIN(0, 2);
1566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1567 IEM_MC_LOCAL(RTUINT128U, uSrc);
1568
1569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1570 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1571 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1572 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1573
1574 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1575 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1576
1577 IEM_MC_ADVANCE_RIP();
1578 IEM_MC_END();
1579 }
1580 else
1581 {
1582 IEM_MC_BEGIN(0, 2);
1583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1584 IEM_MC_LOCAL(RTUINT256U, uSrc);
1585
1586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1587 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1588 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1589 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1590
1591 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1592 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1593
1594 IEM_MC_ADVANCE_RIP();
1595 IEM_MC_END();
1596 }
1597 }
1598 return VINF_SUCCESS;
1599}
1600
1601/**
1602 * @opcode 0x29
1603 * @oppfx 66
1604 * @opcpuid avx
1605 * @opgroup og_avx_pcksclr_datamove
1606 * @opxcpttype 1
1607 * @optest op1=1 op2=2 -> op1=2
1608 * @optest op1=0 op2=-42 -> op1=-42
1609 * @note Almost identical to vmovaps.
1610 */
1611FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1612{
1613 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1614 Assert(pVCpu->iem.s.uVexLength <= 1);
1615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1616 if (IEM_IS_MODRM_REG_MODE(bRm))
1617 {
1618 /*
1619 * Register, register.
1620 */
1621 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1622 IEM_MC_BEGIN(1, 0);
1623
1624 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1625 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1626 if (pVCpu->iem.s.uVexLength == 0)
1627 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1628 IEM_GET_MODRM_REG(pVCpu, bRm));
1629 else
1630 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1631 IEM_GET_MODRM_REG(pVCpu, bRm));
1632 IEM_MC_ADVANCE_RIP();
1633 IEM_MC_END();
1634 }
1635 else
1636 {
1637 /*
1638 * Register, memory.
1639 */
1640 if (pVCpu->iem.s.uVexLength == 0)
1641 {
1642 IEM_MC_BEGIN(0, 2);
1643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1644 IEM_MC_LOCAL(RTUINT128U, uSrc);
1645
1646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1647 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1648 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1649 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1650
1651 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1652 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1653
1654 IEM_MC_ADVANCE_RIP();
1655 IEM_MC_END();
1656 }
1657 else
1658 {
1659 IEM_MC_BEGIN(0, 2);
1660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1661 IEM_MC_LOCAL(RTUINT256U, uSrc);
1662
1663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1664 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1665 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1666 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1667
1668 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1669 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1670
1671 IEM_MC_ADVANCE_RIP();
1672 IEM_MC_END();
1673 }
1674 }
1675 return VINF_SUCCESS;
1676}
1677
1678
1679/**
1680 * @opmnemonic udvexf30f29
1681 * @opcode 0x29
1682 * @oppfx 0xf3
1683 * @opunused vex.modrm
1684 * @opcpuid avx
1685 * @optest ->
1686 * @opdone
1687 */
1688
1689/**
1690 * @opmnemonic udvexf20f29
1691 * @opcode 0x29
1692 * @oppfx 0xf2
1693 * @opunused vex.modrm
1694 * @opcpuid avx
1695 * @optest ->
1696 * @opdone
1697 */
1698
1699
1700/** Opcode VEX.0F 0x2a - invalid */
1701/** Opcode VEX.66.0F 0x2a - invalid */
1702/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
1703FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
1704/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1705FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
1706
1707
1708/**
1709 * @opcode 0x2b
1710 * @opcodesub !11 mr/reg
1711 * @oppfx none
1712 * @opcpuid avx
1713 * @opgroup og_avx_cachect
1714 * @opxcpttype 1
1715 * @optest op1=1 op2=2 -> op1=2
1716 * @optest op1=0 op2=-42 -> op1=-42
1717 * @note Identical implementation to vmovntpd
1718 */
1719FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1720{
1721 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1722 Assert(pVCpu->iem.s.uVexLength <= 1);
1723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1724 if (IEM_IS_MODRM_MEM_MODE(bRm))
1725 {
1726 /*
1727 * Memory, register.
1728 */
1729 if (pVCpu->iem.s.uVexLength == 0)
1730 {
1731 IEM_MC_BEGIN(0, 2);
1732 IEM_MC_LOCAL(RTUINT128U, uSrc);
1733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1734
1735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1736 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1737 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1738 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1739
1740 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1741 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1742
1743 IEM_MC_ADVANCE_RIP();
1744 IEM_MC_END();
1745 }
1746 else
1747 {
1748 IEM_MC_BEGIN(0, 2);
1749 IEM_MC_LOCAL(RTUINT256U, uSrc);
1750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1751
1752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1753 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1754 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1755 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1756
1757 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1758 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1759
1760 IEM_MC_ADVANCE_RIP();
1761 IEM_MC_END();
1762 }
1763 }
1764 /* The register, register encoding is invalid. */
1765 else
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767 return VINF_SUCCESS;
1768}
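/*
 * Decode-shape note (a restatement of the rule above, not new logic): the
 * non-temporal stores are memory-only, so a register r/m encoding is #UD,
 * hence the IEM_IS_MODRM_MEM_MODE check before any state is touched.  The
 * non-temporal cache hint itself has no architectural effect to emulate here.
 */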
1769
1770/**
1771 * @opcode 0x2b
1772 * @opcodesub !11 mr/reg
1773 * @oppfx 0x66
1774 * @opcpuid avx
1775 * @opgroup og_avx_cachect
1776 * @opxcpttype 1
1777 * @optest op1=1 op2=2 -> op1=2
1778 * @optest op1=0 op2=-42 -> op1=-42
1779 * @note Identical implementation to vmovntps
1780 */
1781FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1782{
1783 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1784 Assert(pVCpu->iem.s.uVexLength <= 1);
1785 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1786 if (IEM_IS_MODRM_MEM_MODE(bRm))
1787 {
1788 /*
1789 * Memory, register.
1790 */
1791 if (pVCpu->iem.s.uVexLength == 0)
1792 {
1793 IEM_MC_BEGIN(0, 2);
1794 IEM_MC_LOCAL(RTUINT128U, uSrc);
1795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1796
1797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1798 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1799 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1800 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1801
1802 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1803 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1804
1805 IEM_MC_ADVANCE_RIP();
1806 IEM_MC_END();
1807 }
1808 else
1809 {
1810 IEM_MC_BEGIN(0, 2);
1811 IEM_MC_LOCAL(RTUINT256U, uSrc);
1812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1813
1814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1815 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1816 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1817 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1818
1819 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1820 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1821
1822 IEM_MC_ADVANCE_RIP();
1823 IEM_MC_END();
1824 }
1825 }
1826 /* The register, register encoding is invalid. */
1827 else
1828 return IEMOP_RAISE_INVALID_OPCODE();
1829 return VINF_SUCCESS;
1830}
1831
1832/**
1833 * @opmnemonic udvexf30f2b
1834 * @opcode 0x2b
1835 * @oppfx 0xf3
1836 * @opunused vex.modrm
1837 * @opcpuid avx
1838 * @optest ->
1839 * @opdone
1840 */
1841
1842/**
1843 * @opmnemonic udvexf20f2b
1844 * @opcode 0x2b
1845 * @oppfx 0xf2
1846 * @opunused vex.modrm
1847 * @opcpuid avx
1848 * @optest ->
1849 * @opdone
1850 */
1851
1852
1853/* Opcode VEX.0F 0x2c - invalid */
1854/* Opcode VEX.66.0F 0x2c - invalid */
1855/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
1856FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1857/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
1858FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1859
1860/* Opcode VEX.0F 0x2d - invalid */
1861/* Opcode VEX.66.0F 0x2d - invalid */
1862/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
1863FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1864/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
1865FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1866
1867/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
1868FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss);
1869/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
1870FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd);
1871/* Opcode VEX.F3.0F 0x2e - invalid */
1872/* Opcode VEX.F2.0F 0x2e - invalid */
1873
1874/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
1875FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1876/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
1877FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1878/* Opcode VEX.F3.0F 0x2f - invalid */
1879/* Opcode VEX.F2.0F 0x2f - invalid */
1880
1881/* Opcode VEX.0F 0x30 - invalid */
1882/* Opcode VEX.0F 0x31 - invalid */
1883/* Opcode VEX.0F 0x32 - invalid */
1884/* Opcode VEX.0F 0x33 - invalid */
1885/* Opcode VEX.0F 0x34 - invalid */
1886/* Opcode VEX.0F 0x35 - invalid */
1887/* Opcode VEX.0F 0x36 - invalid */
1888/* Opcode VEX.0F 0x37 - invalid */
1889/* Opcode VEX.0F 0x38 - invalid */
1890/* Opcode VEX.0F 0x39 - invalid */
1891/* Opcode VEX.0F 0x3a - invalid */
1892/* Opcode VEX.0F 0x3b - invalid */
1893/* Opcode VEX.0F 0x3c - invalid */
1894/* Opcode VEX.0F 0x3d - invalid */
1895/* Opcode VEX.0F 0x3e - invalid */
1896/* Opcode VEX.0F 0x3f - invalid */
1897/* Opcode VEX.0F 0x40 - invalid */
1898/* Opcode VEX.0F 0x41 - invalid */
1899/* Opcode VEX.0F 0x42 - invalid */
1900/* Opcode VEX.0F 0x43 - invalid */
1901/* Opcode VEX.0F 0x44 - invalid */
1902/* Opcode VEX.0F 0x45 - invalid */
1903/* Opcode VEX.0F 0x46 - invalid */
1904/* Opcode VEX.0F 0x47 - invalid */
1905/* Opcode VEX.0F 0x48 - invalid */
1906/* Opcode VEX.0F 0x49 - invalid */
1907/* Opcode VEX.0F 0x4a - invalid */
1908/* Opcode VEX.0F 0x4b - invalid */
1909/* Opcode VEX.0F 0x4c - invalid */
1910/* Opcode VEX.0F 0x4d - invalid */
1911/* Opcode VEX.0F 0x4e - invalid */
1912/* Opcode VEX.0F 0x4f - invalid */
1913
1914/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
1915FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1916/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
1917FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1918/* Opcode VEX.F3.0F 0x50 - invalid */
1919/* Opcode VEX.F2.0F 0x50 - invalid */
1920
1921/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
1922FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1923/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
1924FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1925/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
1926FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1927/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1928FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1929
1930/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
1931FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1932/* Opcode VEX.66.0F 0x52 - invalid */
1933/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
1934FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1935/* Opcode VEX.F2.0F 0x52 - invalid */
1936
1937/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
1938FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1939/* Opcode VEX.66.0F 0x53 - invalid */
1940/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
1941FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1942/* Opcode VEX.F2.0F 0x53 - invalid */
1943
1944
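/* Note: vandps/vandpd and the other bitwise ops below (vandnps/vandnpd,
   vorps/vorpd, vxorps/vxorpd) reuse the integer vpand/vpandn/vpor/vpxor
   workers, as the bit pattern of the result does not depend on how the
   lanes are interpreted.  IEM_SELECT_HOST_OR_FALLBACK is presumably keyed
   on host AVX2 support, picking the assembly-backed table when available
   and the C fallback otherwise. */
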
1945/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
1946FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
1947{
1948 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1949 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1950 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
1951}
1952
1953
1954/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
1955FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
1956{
1957 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
1958 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1959 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
1960}
1961
1962
1963/* Opcode VEX.F3.0F 0x54 - invalid */
1964/* Opcode VEX.F2.0F 0x54 - invalid */
1965
1966
1967/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
1968FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
1969{
1970 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1971 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1972 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
1973}
1974
1975
1976/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
1977FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
1978{
1979 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
1980 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1981 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
1982}
1983
1984
1985/* Opcode VEX.F3.0F 0x55 - invalid */
1986/* Opcode VEX.F2.0F 0x55 - invalid */
1987
1988/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
1989FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
1990{
1991 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1992 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1993 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
1994}
1995
1996
1997/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
1998FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
1999{
2000 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2001 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2002 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2003}
2004
2005
2006/* Opcode VEX.F3.0F 0x56 - invalid */
2007/* Opcode VEX.F2.0F 0x56 - invalid */
2008
2009
2010/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2011FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2012{
2013 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2014 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2015 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2016}
2017
2018
2019/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2020FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2021{
2022 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2023 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2024 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2025}
2026
2027
2028/* Opcode VEX.F3.0F 0x57 - invalid */
2029/* Opcode VEX.F2.0F 0x57 - invalid */
2030
2031/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2032FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2033/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2034FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2035/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2036FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2037/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2038FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2039
2040/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2041FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2042/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2043FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2044/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2045FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2046/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2047FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2048
2049/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2050FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2051/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2052FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2053/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2054FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2055/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2056FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2057
2058/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2059FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2060/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2061FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2062/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2063FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2064/* Opcode VEX.F2.0F 0x5b - invalid */
2065
2066/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2067FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2068/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2069FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2070/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2071FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2072/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2073FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2074
2075/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2076FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2077/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2078FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2079/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2080FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2081/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2082FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2083
2084/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2085FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2086/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2087FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2088/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2089FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2090/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2091FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2092
2093/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2094FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2095/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2096FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2097/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2098FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2099/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2100FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2101
2102
2103/* Opcode VEX.0F 0x60 - invalid */
2104
2105
2106/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2107FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2108{
2109 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2110 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2111 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2112}
2113
2114
2115/* Opcode VEX.F3.0F 0x60 - invalid */
2116
2117
2118/* Opcode VEX.0F 0x61 - invalid */
2119
2120
2121/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2122FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2123{
2124 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2125 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2126 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2127}
2128
2129
2130/* Opcode VEX.F3.0F 0x61 - invalid */
2131
2132
2133/* Opcode VEX.0F 0x62 - invalid */
2134
2135/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2136FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2137{
2138 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2139 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2140 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2141}
2142
2143
2144/* Opcode VEX.F3.0F 0x62 - invalid */
2145
2146
2147
2148/* Opcode VEX.0F 0x63 - invalid */
2149/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2150FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2151/* Opcode VEX.F3.0F 0x63 - invalid */
2152
2153/* Opcode VEX.0F 0x64 - invalid */
2154
2155
2156/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2157FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2158{
2159 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2160 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2161 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2162}
2163
2164
2165/* Opcode VEX.F3.0F 0x64 - invalid */
2166
2167/* Opcode VEX.0F 0x65 - invalid */
2168
2169
2170/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2171FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2172{
2173 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2174 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
2175 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2176}
2177
2178
2179/* Opcode VEX.F3.0F 0x65 - invalid */
2180
2181/* Opcode VEX.0F 0x66 - invalid */
2182
2183
2184/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
2185FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
2186{
2187 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2188 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
2189 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2190}
2191
2192
2193/* Opcode VEX.F3.0F 0x66 - invalid */
2194
2195/* Opcode VEX.0F 0x67 - invalid */
2196/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
2197FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2198/* Opcode VEX.F3.0F 0x67 - invalid */
2199
2200
2201///**
2202// * Common worker for SSE2 instructions on the form:
2203// * pxxxx xmm1, xmm2/mem128
2204// *
2205// * The 2nd operand is the second half of a register, which in the memory case
2206// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2207// * where it may read the full 128 bits or only the upper 64 bits.
2208// *
2209// * Exceptions type 4.
2210// */
2211//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2212//{
2213// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2214// if (IEM_IS_MODRM_REG_MODE(bRm))
2215// {
2216// /*
2217// * Register, register.
2218// */
2219// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2220// IEM_MC_BEGIN(2, 0);
2221// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2222// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2223// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2224// IEM_MC_PREPARE_SSE_USAGE();
2225// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2226// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2227// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2228// IEM_MC_ADVANCE_RIP();
2229// IEM_MC_END();
2230// }
2231// else
2232// {
2233// /*
2234// * Register, memory.
2235// */
2236// IEM_MC_BEGIN(2, 2);
2237// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2238// IEM_MC_LOCAL(RTUINT128U, uSrc);
2239// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2240// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2241//
2242// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2243// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2244// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2245// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
2246//
2247// IEM_MC_PREPARE_SSE_USAGE();
2248// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2249// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2250//
2251// IEM_MC_ADVANCE_RIP();
2252// IEM_MC_END();
2253// }
2254// return VINF_SUCCESS;
2255//}
2256
2257
2258/* Opcode VEX.0F 0x68 - invalid */
2259
2260/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
2261FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2262{
2263 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2264 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
2265 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2266}
2267
2268
2269/* Opcode VEX.F3.0F 0x68 - invalid */
2270
2271
2272/* Opcode VEX.0F 0x69 - invalid */
2273
2274
2275/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
2276FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2277{
2278 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2279 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
2280 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2281}
2282
2283
2284/* Opcode VEX.F3.0F 0x69 - invalid */
2285
2286
2287/* Opcode VEX.0F 0x6a - invalid */
2288
2289
2290/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
2291FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2292{
2293 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2294 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
2295 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2296}
2297
2298
2299/* Opcode VEX.F3.0F 0x6a - invalid */
2300
2301
2302/* Opcode VEX.0F 0x6b - invalid */
2303/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
2304FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2305/* Opcode VEX.F3.0F 0x6b - invalid */
2306
2307
2308/* Opcode VEX.0F 0x6c - invalid */
2309
2310
2311/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
2312FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2313{
2314 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2315 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
2316 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2317}
2318
2319
2320/* Opcode VEX.F3.0F 0x6c - invalid */
2321/* Opcode VEX.F2.0F 0x6c - invalid */
2322
2323
2324/* Opcode VEX.0F 0x6d - invalid */
2325
2326
2327/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
2328FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2329{
2330 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2331 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
2332 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2333}
2334
2335
2336/* Opcode VEX.F3.0F 0x6d - invalid */
2337
2338
2339/* Opcode VEX.0F 0x6e - invalid */
2340
2341FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2342{
2343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2344 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2345 {
2346 /**
2347 * @opcode 0x6e
2348 * @opcodesub rex.w=1
2349 * @oppfx 0x66
2350 * @opcpuid avx
2351 * @opgroup og_avx_simdint_datamov
2352 * @opxcpttype 5
2353 * @optest 64-bit / op1=1 op2=2 -> op1=2
2354 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
2355 */
2356 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2357 if (IEM_IS_MODRM_REG_MODE(bRm))
2358 {
2359 /* XMM, greg64 */
2360 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2361 IEM_MC_BEGIN(0, 1);
2362 IEM_MC_LOCAL(uint64_t, u64Tmp);
2363
2364 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2365 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2366
2367 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2368 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2369
2370 IEM_MC_ADVANCE_RIP();
2371 IEM_MC_END();
2372 }
2373 else
2374 {
2375 /* XMM, [mem64] */
2376 IEM_MC_BEGIN(0, 2);
2377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2378 IEM_MC_LOCAL(uint64_t, u64Tmp);
2379
2380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2381 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2382 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2383 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2384
2385 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2386 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2387
2388 IEM_MC_ADVANCE_RIP();
2389 IEM_MC_END();
2390 }
2391 }
2392 else
2393 {
2394 /**
2395 * @opdone
2396 * @opcode 0x6e
2397 * @opcodesub rex.w=0
2398 * @oppfx 0x66
2399 * @opcpuid avx
2400 * @opgroup og_avx_simdint_datamov
2401 * @opxcpttype 5
2402 * @opfunction iemOp_vmovd_q_Vy_Ey
2403 * @optest op1=1 op2=2 -> op1=2
2404 * @optest op1=0 op2=-42 -> op1=-42
2405 */
2406 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2407 if (IEM_IS_MODRM_REG_MODE(bRm))
2408 {
2409 /* XMM, greg32 */
2410 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2411 IEM_MC_BEGIN(0, 1);
2412 IEM_MC_LOCAL(uint32_t, u32Tmp);
2413
2414 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2415 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2416
2417 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2418 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2419
2420 IEM_MC_ADVANCE_RIP();
2421 IEM_MC_END();
2422 }
2423 else
2424 {
2425 /* XMM, [mem32] */
2426 IEM_MC_BEGIN(0, 2);
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428 IEM_MC_LOCAL(uint32_t, u32Tmp);
2429
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2431 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2432 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2433 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2434
2435 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2436 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2437
2438 IEM_MC_ADVANCE_RIP();
2439 IEM_MC_END();
2440 }
2441 }
2442 return VINF_SUCCESS;
2443}
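
/* Note: VEX.W selects the width here: W=1 decodes as vmovq (64-bit GPR),
   W=0 as vmovd (32-bit GPR); both forms require VEX.L=0 and an unused
   VVVV field, which the decode helpers above enforce. */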
2444
2445
2446/* Opcode VEX.F3.0F 0x6e - invalid */
2447
2448
2449/* Opcode VEX.0F 0x6f - invalid */
2450
2451/**
2452 * @opcode 0x6f
2453 * @oppfx 0x66
2454 * @opcpuid avx
2455 * @opgroup og_avx_simdint_datamove
2456 * @opxcpttype 1
2457 * @optest op1=1 op2=2 -> op1=2
2458 * @optest op1=0 op2=-42 -> op1=-42
2459 */
2460FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2461{
2462 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2463 Assert(pVCpu->iem.s.uVexLength <= 1);
2464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2465 if (IEM_IS_MODRM_REG_MODE(bRm))
2466 {
2467 /*
2468 * Register, register.
2469 */
2470 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2471 IEM_MC_BEGIN(0, 0);
2472
2473 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2474 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2475 if (pVCpu->iem.s.uVexLength == 0)
2476 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2477 IEM_GET_MODRM_RM(pVCpu, bRm));
2478 else
2479 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2480 IEM_GET_MODRM_RM(pVCpu, bRm));
2481 IEM_MC_ADVANCE_RIP();
2482 IEM_MC_END();
2483 }
2484 else if (pVCpu->iem.s.uVexLength == 0)
2485 {
2486 /*
2487 * Register, memory128.
2488 */
2489 IEM_MC_BEGIN(0, 2);
2490 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2492
2493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2494 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2495 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2496 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2497
2498 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2499 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
2500
2501 IEM_MC_ADVANCE_RIP();
2502 IEM_MC_END();
2503 }
2504 else
2505 {
2506 /*
2507 * Register, memory256.
2508 */
2509 IEM_MC_BEGIN(0, 2);
2510 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
2511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2512
2513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2514 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2515 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2516 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2517
2518 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2519 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
2520
2521 IEM_MC_ADVANCE_RIP();
2522 IEM_MC_END();
2523 }
2524 return VINF_SUCCESS;
2525}
2526
2527/**
2528 * @opcode 0x6f
2529 * @oppfx 0xf3
2530 * @opcpuid avx
2531 * @opgroup og_avx_simdint_datamove
2532 * @opxcpttype 4UA
2533 * @optest op1=1 op2=2 -> op1=2
2534 * @optest op1=0 op2=-42 -> op1=-42
2535 */
2536FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2537{
2538 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2539 Assert(pVCpu->iem.s.uVexLength <= 1);
2540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2541 if (IEM_IS_MODRM_REG_MODE(bRm))
2542 {
2543 /*
2544 * Register, register.
2545 */
2546 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2547 IEM_MC_BEGIN(0, 0);
2548
2549 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2550 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2551 if (pVCpu->iem.s.uVexLength == 0)
2552 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2553 IEM_GET_MODRM_RM(pVCpu, bRm));
2554 else
2555 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2556 IEM_GET_MODRM_RM(pVCpu, bRm));
2557 IEM_MC_ADVANCE_RIP();
2558 IEM_MC_END();
2559 }
2560 else if (pVCpu->iem.s.uVexLength == 0)
2561 {
2562 /*
2563 * Register, memory128.
2564 */
2565 IEM_MC_BEGIN(0, 2);
2566 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2568
2569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2570 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2571 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2572 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2573
2574 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2575 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
2576
2577 IEM_MC_ADVANCE_RIP();
2578 IEM_MC_END();
2579 }
2580 else
2581 {
2582 /*
2583 * Register, memory256.
2584 */
2585 IEM_MC_BEGIN(0, 2);
2586 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
2587 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2588
2589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2590 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2591 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2592 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2593
2594 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2595 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
2596
2597 IEM_MC_ADVANCE_RIP();
2598 IEM_MC_END();
2599 }
2600 return VINF_SUCCESS;
2601}
2602
2603
2604/* Opcode VEX.0F 0x70 - invalid */
2605
2606
2607/**
2608 * Common worker for AVX/AVX2 instructions on the forms:
2609 * - vpxxx xmm0, xmm2/mem128, imm8
2610 * - vpxxx ymm0, ymm2/mem256, imm8
2611 *
2612 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
2613 */
2614FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
2615{
2616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2617 if (IEM_IS_MODRM_REG_MODE(bRm))
2618 {
2619 /*
2620 * Register, register.
2621 */
2622 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2623 if (pVCpu->iem.s.uVexLength)
2624 {
2625 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
2626 IEM_MC_BEGIN(3, 2);
2627 IEM_MC_LOCAL(RTUINT256U, uDst);
2628 IEM_MC_LOCAL(RTUINT256U, uSrc);
2629 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
2630 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2631 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2632 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2633 IEM_MC_PREPARE_AVX_USAGE();
2634 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2635 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
2636 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2637 IEM_MC_ADVANCE_RIP();
2638 IEM_MC_END();
2639 }
2640 else
2641 {
2642 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2643 IEM_MC_BEGIN(3, 0);
2644 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2645 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2646 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2647 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2648 IEM_MC_PREPARE_AVX_USAGE();
2649 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2650 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2651 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
2652 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2653 IEM_MC_ADVANCE_RIP();
2654 IEM_MC_END();
2655 }
2656 }
2657 else
2658 {
2659 /*
2660 * Register, memory.
2661 */
2662 if (pVCpu->iem.s.uVexLength)
2663 {
2664 IEM_MC_BEGIN(3, 3);
2665 IEM_MC_LOCAL(RTUINT256U, uDst);
2666 IEM_MC_LOCAL(RTUINT256U, uSrc);
2667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2668 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
2669 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2670
2671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2672 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2673 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
2674 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2675 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2676 IEM_MC_PREPARE_AVX_USAGE();
2677
2678 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2679 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
2680 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2681
2682 IEM_MC_ADVANCE_RIP();
2683 IEM_MC_END();
2684 }
2685 else
2686 {
2687 IEM_MC_BEGIN(3, 1);
2688 IEM_MC_LOCAL(RTUINT128U, uSrc);
2689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2690 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2691 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2692
2693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2694 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2695 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2696 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2697 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2698 IEM_MC_PREPARE_AVX_USAGE();
2699
2700 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2701 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2702 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
2703 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2704
2705 IEM_MC_ADVANCE_RIP();
2706 IEM_MC_END();
2707 }
2708 }
2709 return VINF_SUCCESS;
2710}
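
/* Note: in the memory paths above the immediate (bEvil) is deliberately
   fetched after IEM_MC_CALC_RM_EFF_ADDR, since the imm8 byte follows the
   ModR/M, SIB and displacement bytes in the instruction stream; the
   register paths have no displacement, so it can be fetched right after
   the ModR/M byte. */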
2711
2712
2713/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
2714FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2715{
2716 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2717 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
2718 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
2719
2720}
2721
2722
2723/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
2724FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2725{
2726 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2727 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
2728 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
2729
2730}
2731
2732
2733/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
2734FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2735{
2736 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2737 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
2738 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
2739}
2740
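/* For reference: each 2-bit immediate field selects a source dword for
   vpshufd, so e.g.
       vpshufd xmm1, xmm2, 0x1B  ; 0x1B = 00 01 10 11b
   reverses the four dwords; the 256-bit form shuffles each 128-bit lane
   independently.  vpshufhw/vpshuflw likewise shuffle only the high/low
   four words of each lane. */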
2741
2742/* Opcode VEX.0F 0x71 11/2 - invalid. */
2743/** Opcode VEX.66.0F 0x71 11/2. */
2744FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2745
2746/* Opcode VEX.0F 0x71 11/4 - invalid */
2747/** Opcode VEX.66.0F 0x71 11/4. */
2748FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2749
2750/* Opcode VEX.0F 0x71 11/6 - invalid */
2751/** Opcode VEX.66.0F 0x71 11/6. */
2752FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2753
2754
2755/**
2756 * VEX Group 12 jump table for register variant.
2757 */
2758IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
2759{
2760 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2761 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2762 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2763 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2764 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2765 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2766 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2767 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2768};
2769AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
2770
2771
2772/** Opcode VEX.0F 0x71. */
2773FNIEMOP_DEF(iemOp_VGrp12)
2774{
2775 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2776 if (IEM_IS_MODRM_REG_MODE(bRm))
2777 /* register, register */
2778 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
2779 + pVCpu->iem.s.idxPrefix], bRm);
2780 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2781}
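
/* Worked example for the indexing above: VEX.66.0F 0x71 /2 (vpsrlw) with a
   register operand yields IEM_GET_MODRM_REG_8(bRm) = 2 and idxPrefix = 1
   (prefix order: none, 066h, 0f3h, 0f2h), so entry 2*4 + 1 is taken,
   i.e. iemOp_VGrp12_vpsrlw_Hx_Ux_Ib. */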
2782
2783
2784/* Opcode VEX.0F 0x72 11/2 - invalid. */
2785/** Opcode VEX.66.0F 0x72 11/2. */
2786FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
2787
2788/* Opcode VEX.0F 0x72 11/4 - invalid. */
2789/** Opcode VEX.66.0F 0x72 11/4. */
2790FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
2791
2792/* Opcode VEX.0F 0x72 11/6 - invalid. */
2793/** Opcode VEX.66.0F 0x72 11/6. */
2794FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
2795
2796
2797/**
2798 * VEX Group 13 jump table for register variant.
2799 */
2800IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
2801{
2802 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2803 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2804 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2805 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2806 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2807 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2808 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2809 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2810};
2811AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
2812
2813/** Opcode VEX.0F 0x72. */
2814FNIEMOP_DEF(iemOp_VGrp13)
2815{
2816 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2817 if (IEM_IS_MODRM_REG_MODE(bRm))
2818 /* register, register */
2819 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
2820 + pVCpu->iem.s.idxPrefix], bRm);
2821 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2822}
2823
2824
2825/* Opcode VEX.0F 0x73 11/2 - invalid. */
2826/** Opcode VEX.66.0F 0x73 11/2. */
2827FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
2828
2829/** Opcode VEX.66.0F 0x73 11/3. */
2830FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
2831
2832/* Opcode VEX.0F 0x73 11/6 - invalid. */
2833/** Opcode VEX.66.0F 0x73 11/6. */
2834FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
2835
2836/** Opcode VEX.66.0F 0x73 11/7. */
2837FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
2838
2839/**
2840 * VEX Group 14 jump table for register variant.
2841 */
2842IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
2843{
2844 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2845 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2846 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2847 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2848 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2849 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2850 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2851 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2852};
2853AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
2854
2855
2856/** Opcode VEX.0F 0x73. */
2857FNIEMOP_DEF(iemOp_VGrp14)
2858{
2859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2860 if (IEM_IS_MODRM_REG_MODE(bRm))
2861 /* register, register */
2862 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
2863 + pVCpu->iem.s.idxPrefix], bRm);
2864 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2865}
2866
2867
2868/* Opcode VEX.0F 0x74 - invalid */
2869
2870
2871/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
2872FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
2873{
2874 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2875 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
2876 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2877}
2878
2879/* Opcode VEX.F3.0F 0x74 - invalid */
2880/* Opcode VEX.F2.0F 0x74 - invalid */
2881
2882
2883/* Opcode VEX.0F 0x75 - invalid */
2884
2885
2886/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
2887FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
2888{
2889 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2890 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
2891 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2892}
2893
2894
2895/* Opcode VEX.F3.0F 0x75 - invalid */
2896/* Opcode VEX.F2.0F 0x75 - invalid */
2897
2898
2899/* Opcode VEX.0F 0x76 - invalid */
2900
2901
2902/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
2903FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
2904{
2905 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2906 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
2907 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2908}
2909
2910
2911/* Opcode VEX.F3.0F 0x76 - invalid */
2912/* Opcode VEX.F2.0F 0x76 - invalid */
2913
2914
2915/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
2916FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
2917/* Opcode VEX.66.0F 0x77 - invalid */
2918/* Opcode VEX.F3.0F 0x77 - invalid */
2919/* Opcode VEX.F2.0F 0x77 - invalid */
2920
2921/* Opcode VEX.0F 0x78 - invalid */
2922/* Opcode VEX.66.0F 0x78 - invalid */
2923/* Opcode VEX.F3.0F 0x78 - invalid */
2924/* Opcode VEX.F2.0F 0x78 - invalid */
2925
2926/* Opcode VEX.0F 0x79 - invalid */
2927/* Opcode VEX.66.0F 0x79 - invalid */
2928/* Opcode VEX.F3.0F 0x79 - invalid */
2929/* Opcode VEX.F2.0F 0x79 - invalid */
2930
2931/* Opcode VEX.0F 0x7a - invalid */
2932/* Opcode VEX.66.0F 0x7a - invalid */
2933/* Opcode VEX.F3.0F 0x7a - invalid */
2934/* Opcode VEX.F2.0F 0x7a - invalid */
2935
2936/* Opcode VEX.0F 0x7b - invalid */
2937/* Opcode VEX.66.0F 0x7b - invalid */
2938/* Opcode VEX.F3.0F 0x7b - invalid */
2939/* Opcode VEX.F2.0F 0x7b - invalid */
2940
2941/* Opcode VEX.0F 0x7c - invalid */
2942/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
2943FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
2944/* Opcode VEX.F3.0F 0x7c - invalid */
2945/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
2946FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
2947
2948/* Opcode VEX.0F 0x7d - invalid */
2949/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
2950FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
2951/* Opcode VEX.F3.0F 0x7d - invalid */
2952/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
2953FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
2954
2955
2956/* Opcode VEX.0F 0x7e - invalid */
2957
2958FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
2959{
2960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2961 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2962 {
2963 /**
2964 * @opcode 0x7e
2965 * @opcodesub rex.w=1
2966 * @oppfx 0x66
2967 * @opcpuid avx
2968 * @opgroup og_avx_simdint_datamov
2969 * @opxcpttype 5
2970 * @optest 64-bit / op1=1 op2=2 -> op1=2
2971 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
2972 */
2973 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2974 if (IEM_IS_MODRM_REG_MODE(bRm))
2975 {
2976 /* greg64, XMM */
2977 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2978 IEM_MC_BEGIN(0, 1);
2979 IEM_MC_LOCAL(uint64_t, u64Tmp);
2980
2981 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2982 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2983
2984 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
2985 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
2986
2987 IEM_MC_ADVANCE_RIP();
2988 IEM_MC_END();
2989 }
2990 else
2991 {
2992 /* [mem64], XMM */
2993 IEM_MC_BEGIN(0, 2);
2994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2995 IEM_MC_LOCAL(uint64_t, u64Tmp);
2996
2997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2998 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2999 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3000 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3001
3002 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3003 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3004
3005 IEM_MC_ADVANCE_RIP();
3006 IEM_MC_END();
3007 }
3008 }
3009 else
3010 {
3011 /**
3012 * @opdone
3013 * @opcode 0x7e
3014 * @opcodesub rex.w=0
3015 * @oppfx 0x66
3016 * @opcpuid avx
3017 * @opgroup og_avx_simdint_datamov
3018 * @opxcpttype 5
3019 * @opfunction iemOp_vmovd_q_Ey_Vy
3020 * @optest op1=1 op2=2 -> op1=2
3021 * @optest op1=0 op2=-42 -> op1=-42
3022 */
3023 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3024 if (IEM_IS_MODRM_REG_MODE(bRm))
3025 {
3026 /* greg32, XMM */
3027 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3028 IEM_MC_BEGIN(0, 1);
3029 IEM_MC_LOCAL(uint32_t, u32Tmp);
3030
3031 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3032 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3033
3034 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3035 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
3036
3037 IEM_MC_ADVANCE_RIP();
3038 IEM_MC_END();
3039 }
3040 else
3041 {
3042 /* [mem32], XMM */
3043 IEM_MC_BEGIN(0, 2);
3044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3045 IEM_MC_LOCAL(uint32_t, u32Tmp);
3046
3047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3048 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3049 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3050 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3051
3052 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3053 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3054
3055 IEM_MC_ADVANCE_RIP();
3056 IEM_MC_END();
3057 }
3058 }
3059 return VINF_SUCCESS;
3060}
3061
3062/**
3063 * @opcode 0x7e
3064 * @oppfx 0xf3
3065 * @opcpuid avx
3066 * @opgroup og_avx_pcksclr_datamove
3067 * @opxcpttype none
3068 * @optest op1=1 op2=2 -> op1=2
3069 * @optest op1=0 op2=-42 -> op1=-42
3070 */
3071FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
3072{
3073 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3074 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3075 if (IEM_IS_MODRM_REG_MODE(bRm))
3076 {
3077 /*
3078 * Register, register.
3079 */
3080 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3081 IEM_MC_BEGIN(0, 0);
3082
3083 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3084 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3085
3086 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3087 IEM_GET_MODRM_RM(pVCpu, bRm));
3088 IEM_MC_ADVANCE_RIP();
3089 IEM_MC_END();
3090 }
3091 else
3092 {
3093 /*
3094 * Register, memory.
3095 */
3096 IEM_MC_BEGIN(0, 2);
3097 IEM_MC_LOCAL(uint64_t, uSrc);
3098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3099
3100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3101 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3102 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3103 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3104
3105 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3106 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3107
3108 IEM_MC_ADVANCE_RIP();
3109 IEM_MC_END();
3110 }
3111 return VINF_SUCCESS;
3112}
3113
3114/* Opcode VEX.F2.0F 0x7e - invalid */
3115
3116
3117/* Opcode VEX.0F 0x7f - invalid */
3118
3119/**
3120 * @opcode 0x7f
3121 * @oppfx 0x66
3122 * @opcpuid avx
3123 * @opgroup og_avx_simdint_datamove
3124 * @opxcpttype 1
3125 * @optest op1=1 op2=2 -> op1=2
3126 * @optest op1=0 op2=-42 -> op1=-42
3127 */
3128FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3129{
3130 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3131 Assert(pVCpu->iem.s.uVexLength <= 1);
3132 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3133 if (IEM_IS_MODRM_REG_MODE(bRm))
3134 {
3135 /*
3136 * Register, register.
3137 */
3138 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3139 IEM_MC_BEGIN(0, 0);
3140
3141 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3142 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3143 if (pVCpu->iem.s.uVexLength == 0)
3144 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3145 IEM_GET_MODRM_REG(pVCpu, bRm));
3146 else
3147 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3148 IEM_GET_MODRM_REG(pVCpu, bRm));
3149 IEM_MC_ADVANCE_RIP();
3150 IEM_MC_END();
3151 }
3152 else if (pVCpu->iem.s.uVexLength == 0)
3153 {
3154 /*
3155 * Register, memory128.
3156 */
3157 IEM_MC_BEGIN(0, 2);
3158 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3160
3161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3162 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3163 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3164 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3165
3166 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3167 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3168
3169 IEM_MC_ADVANCE_RIP();
3170 IEM_MC_END();
3171 }
3172 else
3173 {
3174 /*
3175 * Register, memory256.
3176 */
3177 IEM_MC_BEGIN(0, 2);
3178 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3180
3181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3182 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3183 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3184 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3185
3186 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3187 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3188
3189 IEM_MC_ADVANCE_RIP();
3190 IEM_MC_END();
3191 }
3192 return VINF_SUCCESS;
3193}
3194
3195/**
3196 * @opcode 0x7f
3197 * @oppfx 0xf3
3198 * @opcpuid avx
3199 * @opgroup og_avx_simdint_datamove
3200 * @opxcpttype 4UA
3201 * @optest op1=1 op2=2 -> op1=2
3202 * @optest op1=0 op2=-42 -> op1=-42
3203 */
3204FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3205{
3206 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3207 Assert(pVCpu->iem.s.uVexLength <= 1);
3208 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3209 if (IEM_IS_MODRM_REG_MODE(bRm))
3210 {
3211 /*
3212 * Register, register.
3213 */
3214 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3215 IEM_MC_BEGIN(0, 0);
3216
3217 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3218 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3219 if (pVCpu->iem.s.uVexLength == 0)
3220 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3221 IEM_GET_MODRM_REG(pVCpu, bRm));
3222 else
3223 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3224 IEM_GET_MODRM_REG(pVCpu, bRm));
3225 IEM_MC_ADVANCE_RIP();
3226 IEM_MC_END();
3227 }
3228 else if (pVCpu->iem.s.uVexLength == 0)
3229 {
3230 /*
3231 * Register, memory128.
3232 */
3233 IEM_MC_BEGIN(0, 2);
3234 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3235 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3236
3237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3238 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3239 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3240 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3241
3242 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3243 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3244
3245 IEM_MC_ADVANCE_RIP();
3246 IEM_MC_END();
3247 }
3248 else
3249 {
3250 /*
3251 * Register, memory256.
3252 */
3253 IEM_MC_BEGIN(0, 2);
3254 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3256
3257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3258 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3260 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3261
3262 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3263 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3264
3265 IEM_MC_ADVANCE_RIP();
3266 IEM_MC_END();
3267 }
3268 return VINF_SUCCESS;
3269}
3270
3271/* Opcode VEX.F2.0F 0x7f - invalid */
3272
3273
3274/* Opcode VEX.0F 0x80 - invalid */
3275/* Opcode VEX.0F 0x81 - invalid */
3276/* Opcode VEX.0F 0x82 - invalid */
3277/* Opcode VEX.0F 0x83 - invalid */
3278/* Opcode VEX.0F 0x84 - invalid */
3279/* Opcode VEX.0F 0x85 - invalid */
3280/* Opcode VEX.0F 0x86 - invalid */
3281/* Opcode VEX.0F 0x87 - invalid */
3282/* Opcode VEX.0F 0x88 - invalid */
3283/* Opcode VEX.0F 0x89 - invalid */
3284/* Opcode VEX.0F 0x8a - invalid */
3285/* Opcode VEX.0F 0x8b - invalid */
3286/* Opcode VEX.0F 0x8c - invalid */
3287/* Opcode VEX.0F 0x8d - invalid */
3288/* Opcode VEX.0F 0x8e - invalid */
3289/* Opcode VEX.0F 0x8f - invalid */
3290/* Opcode VEX.0F 0x90 - invalid */
3291/* Opcode VEX.0F 0x91 - invalid */
3292/* Opcode VEX.0F 0x92 - invalid */
3293/* Opcode VEX.0F 0x93 - invalid */
3294/* Opcode VEX.0F 0x94 - invalid */
3295/* Opcode VEX.0F 0x95 - invalid */
3296/* Opcode VEX.0F 0x96 - invalid */
3297/* Opcode VEX.0F 0x97 - invalid */
3298/* Opcode VEX.0F 0x98 - invalid */
3299/* Opcode VEX.0F 0x99 - invalid */
3300/* Opcode VEX.0F 0x9a - invalid */
3301/* Opcode VEX.0F 0x9b - invalid */
3302/* Opcode VEX.0F 0x9c - invalid */
3303/* Opcode VEX.0F 0x9d - invalid */
3304/* Opcode VEX.0F 0x9e - invalid */
3305/* Opcode VEX.0F 0x9f - invalid */
3306/* Opcode VEX.0F 0xa0 - invalid */
3307/* Opcode VEX.0F 0xa1 - invalid */
3308/* Opcode VEX.0F 0xa2 - invalid */
3309/* Opcode VEX.0F 0xa3 - invalid */
3310/* Opcode VEX.0F 0xa4 - invalid */
3311/* Opcode VEX.0F 0xa5 - invalid */
3312/* Opcode VEX.0F 0xa6 - invalid */
3313/* Opcode VEX.0F 0xa7 - invalid */
3314/* Opcode VEX.0F 0xa8 - invalid */
3315/* Opcode VEX.0F 0xa9 - invalid */
3316/* Opcode VEX.0F 0xaa - invalid */
3317/* Opcode VEX.0F 0xab - invalid */
3318/* Opcode VEX.0F 0xac - invalid */
3319/* Opcode VEX.0F 0xad - invalid */
3320
3321
3322/* Opcode VEX.0F 0xae mem/0 - invalid. */
3323/* Opcode VEX.0F 0xae mem/1 - invalid. */
3324
3325/**
3326 * @ opmaps grp15
3327 * @ opcode !11/2
3328 * @ oppfx none
3329 * @ opcpuid sse
3330 * @ opgroup og_sse_mxcsrsm
3331 * @ opxcpttype 5
3332 * @ optest op1=0 -> mxcsr=0
3333 * @ optest op1=0x2083 -> mxcsr=0x2083
3334 * @ optest op1=0xfffffffe -> value.xcpt=0xd
3335 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
3336 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
3337 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
3338 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
3339 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
3340 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3341 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3342 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3343 */
3344FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
3345//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
3346//{
3347// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3348// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
3349// return IEMOP_RAISE_INVALID_OPCODE();
3350//
3351// IEM_MC_BEGIN(2, 0);
3352// IEM_MC_ARG(uint8_t, iEffSeg, 0);
3353// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3354// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3355// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3356// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3357// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3358// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
3359// IEM_MC_END();
3360// return VINF_SUCCESS;
3361//}
3362
3363
3364/**
3365 * @opmaps vexgrp15
3366 * @opcode !11/3
3367 * @oppfx none
3368 * @opcpuid avx
3369 * @opgroup og_avx_mxcsrsm
3370 * @opxcpttype 5
3371 * @optest mxcsr=0 -> op1=0
3372 * @optest mxcsr=0x2083 -> op1=0x2083
3373 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
3374 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
3375 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
3376 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
3377 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
3378 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
3379 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
3380 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
3381 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
3382 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
3383 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3384 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
3385 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3386 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
3387 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3388 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
3389 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
3390 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
3391 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
3392 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
3393 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
3394 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
3395 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
3396 * -> value.xcpt=0x6
3397 * @remarks The AMD Jaguar CPU (f0x16,m0,s1) raises \#UD when CR0.EM is set.
3398 *          It also doesn't seem to check for XCR0[2:1] != 11b, which does
3399 *          not match APMv4 rev 3.17 page 509.
3400 * @todo Test this instruction on AMD Ryzen.
3401 */
3402FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
3403{
3404 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3405 IEM_MC_BEGIN(2, 0);
3406 IEM_MC_ARG(uint8_t, iEffSeg, 0);
3407 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3408 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3409 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3410 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3411 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3412 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
3413 IEM_MC_END();
3414 return VINF_SUCCESS;
3415}
3416
3417/* Opcode VEX.0F 0xae mem/4 - invalid. */
3418/* Opcode VEX.0F 0xae mem/5 - invalid. */
3419/* Opcode VEX.0F 0xae mem/6 - invalid. */
3420/* Opcode VEX.0F 0xae mem/7 - invalid. */
3421
3422/* Opcode VEX.0F 0xae 11b/0 - invalid. */
3423/* Opcode VEX.0F 0xae 11b/1 - invalid. */
3424/* Opcode VEX.0F 0xae 11b/2 - invalid. */
3425/* Opcode VEX.0F 0xae 11b/3 - invalid. */
3426/* Opcode VEX.0F 0xae 11b/4 - invalid. */
3427/* Opcode VEX.0F 0xae 11b/5 - invalid. */
3428/* Opcode VEX.0F 0xae 11b/6 - invalid. */
3429/* Opcode VEX.0F 0xae 11b/7 - invalid. */
3430
3431/**
3432 * VEX Group 15 jump table for memory variant.
3433 */
3434IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
3435{ /* pfx: none, 066h, 0f3h, 0f2h */
3436 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3437 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3438 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3439 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3440 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3441 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3442 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3443 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3444};
3445AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
3446
3447
3448/** Opcode VEX.0F 0xae. */
3449FNIEMOP_DEF(iemOp_VGrp15)
3450{
3451 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3452 if (IEM_IS_MODRM_REG_MODE(bRm))
3453 /* register, register */
3454 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
3455
3456 /* memory, register */
3457 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3458 + pVCpu->iem.s.idxPrefix], bRm);
3459}
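
/* Note: unlike VEX groups 12-14 above, which only decode register (11b)
   forms, group 15 only decodes memory forms (/2 vldmxcsr, /3 vstmxcsr);
   all register (11b) encodings are invalid. */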
3460
3461
3462/* Opcode VEX.0F 0xaf - invalid. */
3463
3464/* Opcode VEX.0F 0xb0 - invalid. */
3465/* Opcode VEX.0F 0xb1 - invalid. */
3466/* Opcode VEX.0F 0xb2 - invalid. */
3468/* Opcode VEX.0F 0xb3 - invalid. */
3469/* Opcode VEX.0F 0xb4 - invalid. */
3470/* Opcode VEX.0F 0xb5 - invalid. */
3471/* Opcode VEX.0F 0xb6 - invalid. */
3472/* Opcode VEX.0F 0xb7 - invalid. */
3473/* Opcode VEX.0F 0xb8 - invalid. */
3474/* Opcode VEX.0F 0xb9 - invalid. */
3475/* Opcode VEX.0F 0xba - invalid. */
3476/* Opcode VEX.0F 0xbb - invalid. */
3477/* Opcode VEX.0F 0xbc - invalid. */
3478/* Opcode VEX.0F 0xbd - invalid. */
3479/* Opcode VEX.0F 0xbe - invalid. */
3480/* Opcode VEX.0F 0xbf - invalid. */
3481
3482/* Opcode VEX.0F 0xc0 - invalid. */
3483/* Opcode VEX.66.0F 0xc0 - invalid. */
3484/* Opcode VEX.F3.0F 0xc0 - invalid. */
3485/* Opcode VEX.F2.0F 0xc0 - invalid. */
3486
3487/* Opcode VEX.0F 0xc1 - invalid. */
3488/* Opcode VEX.66.0F 0xc1 - invalid. */
3489/* Opcode VEX.F3.0F 0xc1 - invalid. */
3490/* Opcode VEX.F2.0F 0xc1 - invalid. */
3491
3492/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
3493FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
3494/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
3495FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
3496/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
3497FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
3498/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
3499FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
3500
3501/* Opcode VEX.0F 0xc3 - invalid */
3502/* Opcode VEX.66.0F 0xc3 - invalid */
3503/* Opcode VEX.F3.0F 0xc3 - invalid */
3504/* Opcode VEX.F2.0F 0xc3 - invalid */
3505
3506/* Opcode VEX.0F 0xc4 - invalid */
3507/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
3508FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
3509/* Opcode VEX.F3.0F 0xc4 - invalid */
3510/* Opcode VEX.F2.0F 0xc4 - invalid */
3511
3512/* Opcode VEX.0F 0xc5 - invalid */
3513/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
3514FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
3515/* Opcode VEX.F3.0F 0xc5 - invalid */
3516/* Opcode VEX.F2.0F 0xc5 - invalid */
3517
3518/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
3519FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
3520/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
3521FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
3522/* Opcode VEX.F3.0F 0xc6 - invalid */
3523/* Opcode VEX.F2.0F 0xc6 - invalid */
3524
3525/* Opcode VEX.0F 0xc7 - invalid */
3526/* Opcode VEX.66.0F 0xc7 - invalid */
3527/* Opcode VEX.F3.0F 0xc7 - invalid */
3528/* Opcode VEX.F2.0F 0xc7 - invalid */
3529
3530/* Opcode VEX.0F 0xc8 - invalid */
3531/* Opcode VEX.0F 0xc9 - invalid */
3532/* Opcode VEX.0F 0xca - invalid */
3533/* Opcode VEX.0F 0xcb - invalid */
3534/* Opcode VEX.0F 0xcc - invalid */
3535/* Opcode VEX.0F 0xcd - invalid */
3536/* Opcode VEX.0F 0xce - invalid */
3537/* Opcode VEX.0F 0xcf - invalid */
3538
3539
3540/* Opcode VEX.0F 0xd0 - invalid */
3541/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
3542FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
3543/* Opcode VEX.F3.0F 0xd0 - invalid */
3544/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
3545FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
3546
3547/* Opcode VEX.0F 0xd1 - invalid */
3548/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
3549FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
3550/* Opcode VEX.F3.0F 0xd1 - invalid */
3551/* Opcode VEX.F2.0F 0xd1 - invalid */
3552
3553/* Opcode VEX.0F 0xd2 - invalid */
3554/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
3555FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
3556/* Opcode VEX.F3.0F 0xd2 - invalid */
3557/* Opcode VEX.F2.0F 0xd2 - invalid */
3558
3559/* Opcode VEX.0F 0xd3 - invalid */
3560/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
3561FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
3562/* Opcode VEX.F3.0F 0xd3 - invalid */
3563/* Opcode VEX.F2.0F 0xd3 - invalid */
3564
3565/* Opcode VEX.0F 0xd4 - invalid */
3566
3567
3568/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
3569FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
3570{
3571 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3572 IEMOPMEDIAF3_INIT_VARS( vpaddq);
3573 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3574}
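
/*
 * The IEMOPMEDIAF3_INIT_VARS + IEM_SELECT_HOST_OR_FALLBACK pattern above
 * recurs in the other full-vector arithmetic workers below.  Rough sketch
 * of the assumed shape of what the macro pair provides (illustration only,
 * the real definitions live in the IEM internal headers):
 */
#if 0
    /* Two function tables: one referencing host-optimized assembly helpers,
       the other the portable C fallbacks... */
    static IEMOPMEDIAF3 const s_Host     = { iemAImpl_vpaddq_u128,          iemAImpl_vpaddq_u256          };
    static IEMOPMEDIAF3 const s_Fallback = { iemAImpl_vpaddq_u128_fallback, iemAImpl_vpaddq_u256_fallback };
    /* ...of which IEM_SELECT_HOST_OR_FALLBACK picks &s_Host when the host
       CPU supports the named feature (here fAvx2) and &s_Fallback if not. */
#endif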
3575
3576
3577/* Opcode VEX.F3.0F 0xd4 - invalid */
3578/* Opcode VEX.F2.0F 0xd4 - invalid */
3579
3580/* Opcode VEX.0F 0xd5 - invalid */
3581/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
3582FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
3583/* Opcode VEX.F3.0F 0xd5 - invalid */
3584/* Opcode VEX.F2.0F 0xd5 - invalid */
3585
3586/* Opcode VEX.0F 0xd6 - invalid */
3587
3588/**
3589 * @opcode 0xd6
3590 * @oppfx 0x66
3591 * @opcpuid avx
3592 * @opgroup og_avx_pcksclr_datamove
3593 * @opxcpttype none
3594 * @optest op1=-1 op2=2 -> op1=2
3595 * @optest op1=0 op2=-42 -> op1=-42
3596 */
3597FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
3598{
3599 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3601 if (IEM_IS_MODRM_REG_MODE(bRm))
3602 {
3603 /*
3604 * Register, register.
3605 */
3606 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3607 IEM_MC_BEGIN(0, 0);
3608
3609 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3610 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3611
3612 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3613 IEM_GET_MODRM_REG(pVCpu, bRm));
3614 IEM_MC_ADVANCE_RIP();
3615 IEM_MC_END();
3616 }
3617 else
3618 {
3619 /*
3620 * Memory, register.
3621 */
3622 IEM_MC_BEGIN(0, 2);
3623 IEM_MC_LOCAL(uint64_t, uSrc);
3624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3625
3626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3627 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3628 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3629 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3630
3631 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3632 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3633
3634 IEM_MC_ADVANCE_RIP();
3635 IEM_MC_END();
3636 }
3637 return VINF_SUCCESS;
3638}
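
/*
 * Encoding/usage example (illustrative): 'vmovq [rax], xmm1' takes the
 * memory path above and stores the low qword of xmm1, while the register
 * form 'vmovq xmm2, xmm1' copies the low qword and zeroes the destination
 * register all the way up to VLMAX (see IEM_MC_COPY_YREG_U64_ZX_VLMAX).
 */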
3639
3640/* Opcode VEX.F3.0F 0xd6 - invalid */
3641/* Opcode VEX.F2.0F 0xd6 - invalid */
3642
3643
3644/* Opcode VEX.0F 0xd7 - invalid */
3645
3646/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
3647FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
3648{
3649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3650    /* Docs say register only. */
3651 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
3652 {
3653        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
3654 IEMOP_MNEMONIC2(RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
3655 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3656 if (pVCpu->iem.s.uVexLength)
3657 {
3658 IEM_MC_BEGIN(2, 1);
3659 IEM_MC_ARG(uint64_t *, puDst, 0);
3660 IEM_MC_LOCAL(RTUINT256U, uSrc);
3661 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3662 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3663 IEM_MC_PREPARE_AVX_USAGE();
3664 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3665 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3666 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
3667 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
3668 IEM_MC_ADVANCE_RIP();
3669 IEM_MC_END();
3670 }
3671 else
3672 {
3673 IEM_MC_BEGIN(2, 0);
3674 IEM_MC_ARG(uint64_t *, puDst, 0);
3675 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3676 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3677 IEM_MC_PREPARE_AVX_USAGE();
3678 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3679 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3680 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
3681 IEM_MC_ADVANCE_RIP();
3682 IEM_MC_END();
3683 }
3684 return VINF_SUCCESS;
3685 }
3686 return IEMOP_RAISE_INVALID_OPCODE();
3687}
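
/*
 * Semantics recap: vpmovmskb collects the most significant bit of each
 * source byte into the low bits of the destination GPR, e.g.
 * 'vpmovmskb eax, ymm1' yields a 32-bit mask (one bit per byte) and
 * 'vpmovmskb eax, xmm1' a 16-bit mask, zero-extended into the register.
 */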
3688
3689
3690/* Opcode VEX.F3.0F 0xd7 - invalid */
3691/* Opcode VEX.F2.0F 0xd7 - invalid */
3692
3693
3694/* Opcode VEX.0F 0xd8 - invalid */
3695/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
3696FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
3697/* Opcode VEX.F3.0F 0xd8 - invalid */
3698/* Opcode VEX.F2.0F 0xd8 - invalid */
3699
3700/* Opcode VEX.0F 0xd9 - invalid */
3701/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
3702FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
3703/* Opcode VEX.F3.0F 0xd9 - invalid */
3704/* Opcode VEX.F2.0F 0xd9 - invalid */
3705
3706/* Opcode VEX.0F 0xda - invalid */
3707/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
3708FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
3709/* Opcode VEX.F3.0F 0xda - invalid */
3710/* Opcode VEX.F2.0F 0xda - invalid */
3711
3712/* Opcode VEX.0F 0xdb - invalid */
3713
3714
3715/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
3716FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
3717{
3718 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3719 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3720 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3721}
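
/*
 * Note that the bitwise workers (vpand, vpandn, vpor, vpxor) reference the
 * file-global g_iemAImpl_vpand & friends tables instead of declaring local
 * ones via IEMOPMEDIAF3_INIT_VARS, presumably because those tables have
 * users beyond a single decoder function.
 */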
3722
3723
3724/* Opcode VEX.F3.0F 0xdb - invalid */
3725/* Opcode VEX.F2.0F 0xdb - invalid */
3726
3727/* Opcode VEX.0F 0xdc - invalid */
3728/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
3729FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
3730/* Opcode VEX.F3.0F 0xdc - invalid */
3731/* Opcode VEX.F2.0F 0xdc - invalid */
3732
3733/* Opcode VEX.0F 0xdd - invalid */
3734/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
3735FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
3736/* Opcode VEX.F3.0F 0xdd - invalid */
3737/* Opcode VEX.F2.0F 0xdd - invalid */
3738
3739/* Opcode VEX.0F 0xde - invalid */
3740/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, W */
3741FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
3742/* Opcode VEX.F3.0F 0xde - invalid */
3743/* Opcode VEX.F2.0F 0xde - invalid */
3744
3745/* Opcode VEX.0F 0xdf - invalid */
3746
3747
3748/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
3749FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
3750{
3751 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3752 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3753 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3754}
3755
3756
3757/* Opcode VEX.F3.0F 0xdf - invalid */
3758/* Opcode VEX.F2.0F 0xdf - invalid */
3759
3760/* Opcode VEX.0F 0xe0 - invalid */
3761/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
3762FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
3763/* Opcode VEX.F3.0F 0xe0 - invalid */
3764/* Opcode VEX.F2.0F 0xe0 - invalid */
3765
3766/* Opcode VEX.0F 0xe1 - invalid */
3767/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
3768FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
3769/* Opcode VEX.F3.0F 0xe1 - invalid */
3770/* Opcode VEX.F2.0F 0xe1 - invalid */
3771
3772/* Opcode VEX.0F 0xe2 - invalid */
3773/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
3774FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
3775/* Opcode VEX.F3.0F 0xe2 - invalid */
3776/* Opcode VEX.F2.0F 0xe2 - invalid */
3777
3778/* Opcode VEX.0F 0xe3 - invalid */
3779/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
3780FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
3781/* Opcode VEX.F3.0F 0xe3 - invalid */
3782/* Opcode VEX.F2.0F 0xe3 - invalid */
3783
3784/* Opcode VEX.0F 0xe4 - invalid */
3785/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, W */
3786FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
3787/* Opcode VEX.F3.0F 0xe4 - invalid */
3788/* Opcode VEX.F2.0F 0xe4 - invalid */
3789
3790/* Opcode VEX.0F 0xe5 - invalid */
3791/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
3792FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
3793/* Opcode VEX.F3.0F 0xe5 - invalid */
3794/* Opcode VEX.F2.0F 0xe5 - invalid */
3795
3796/* Opcode VEX.0F 0xe6 - invalid */
3797/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
3798FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
3799/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
3800FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
3801/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
3802FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
3803
3804
3805/* Opcode VEX.0F 0xe7 - invalid */
3806
3807/**
3808 * @opcode 0xe7
3809 * @opcodesub !11 mr/reg
3810 * @oppfx 0x66
3811 * @opcpuid avx
3812 * @opgroup og_avx_cachect
3813 * @opxcpttype 1
3814 * @optest op1=-1 op2=2 -> op1=2
3815 * @optest op1=0 op2=-42 -> op1=-42
3816 */
3817FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
3818{
3819 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3820 Assert(pVCpu->iem.s.uVexLength <= 1);
3821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3822 if (IEM_IS_MODRM_MEM_MODE(bRm))
3823 {
3824 if (pVCpu->iem.s.uVexLength == 0)
3825 {
3826 /*
3827 * 128-bit: Memory, register.
3828 */
3829 IEM_MC_BEGIN(0, 2);
3830 IEM_MC_LOCAL(RTUINT128U, uSrc);
3831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3832
3833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3834 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3835 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3836 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3837
3838 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3839 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3840
3841 IEM_MC_ADVANCE_RIP();
3842 IEM_MC_END();
3843 }
3844 else
3845 {
3846 /*
3847 * 256-bit: Memory, register.
3848 */
3849 IEM_MC_BEGIN(0, 2);
3850 IEM_MC_LOCAL(RTUINT256U, uSrc);
3851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3852
3853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3854 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3855 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3856 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3857
3858 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3859 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3860
3861 IEM_MC_ADVANCE_RIP();
3862 IEM_MC_END();
3863 }
3864 return VINF_SUCCESS;
3865 }
3866 /**
3867 * @opdone
3868 * @opmnemonic udvex660fe7reg
3869 * @opcode 0xe7
3870 * @opcodesub 11 mr/reg
3871 * @oppfx 0x66
3872 * @opunused immediate
3873 * @opcpuid avx
3874 * @optest ->
3875 */
3876 return IEMOP_RAISE_INVALID_OPCODE();
3877}
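
/*
 * Usage note: this is the non-temporal store form, e.g.
 * 'vmovntdq [rdi], ymm0'.  The memory operand must be naturally aligned
 * (16 bytes for the 128-bit form, 32 bytes for the 256-bit form), which
 * the *_ALIGN_SSE / *_ALIGN_AVX store operations above enforce.
 */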
3878
3879/* Opcode VEX.F3.0F 0xe7 - invalid */
3880/* Opcode VEX.F2.0F 0xe7 - invalid */
3881
3882
3883/* Opcode VEX.0F 0xe8 - invalid */
3884/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
3885FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
3886/* Opcode VEX.F3.0F 0xe8 - invalid */
3887/* Opcode VEX.F2.0F 0xe8 - invalid */
3888
3889/* Opcode VEX.0F 0xe9 - invalid */
3890/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
3891FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
3892/* Opcode VEX.F3.0F 0xe9 - invalid */
3893/* Opcode VEX.F2.0F 0xe9 - invalid */
3894
3895/* Opcode VEX.0F 0xea - invalid */
3896/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
3897FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
3898/* Opcode VEX.F3.0F 0xea - invalid */
3899/* Opcode VEX.F2.0F 0xea - invalid */
3900
3901/* Opcode VEX.0F 0xeb - invalid */
3902
3903
3904/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
3905FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
3906{
3907 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3908 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3909 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3910}
3911
3912
3913
3914/* Opcode VEX.F3.0F 0xeb - invalid */
3915/* Opcode VEX.F2.0F 0xeb - invalid */
3916
3917/* Opcode VEX.0F 0xec - invalid */
3918/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
3919FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
3920/* Opcode VEX.F3.0F 0xec - invalid */
3921/* Opcode VEX.F2.0F 0xec - invalid */
3922
3923/* Opcode VEX.0F 0xed - invalid */
3924/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
3925FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
3926/* Opcode VEX.F3.0F 0xed - invalid */
3927/* Opcode VEX.F2.0F 0xed - invalid */
3928
3929/* Opcode VEX.0F 0xee - invalid */
3930/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, W */
3931FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
3932/* Opcode VEX.F3.0F 0xee - invalid */
3933/* Opcode VEX.F2.0F 0xee - invalid */
3934
3935
3936/* Opcode VEX.0F 0xef - invalid */
3937
3938
3939/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
3940FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
3941{
3942 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3943 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3944 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3945}
3946
3947
3948/* Opcode VEX.F3.0F 0xef - invalid */
3949/* Opcode VEX.F2.0F 0xef - invalid */
3950
3951/* Opcode VEX.0F 0xf0 - invalid */
3952/* Opcode VEX.66.0F 0xf0 - invalid */
3953/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
3954FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
3955
3956/* Opcode VEX.0F 0xf1 - invalid */
3957/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
3958FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
3959/* Opcode VEX.F2.0F 0xf1 - invalid */
3960
3961/* Opcode VEX.0F 0xf2 - invalid */
3962/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
3963FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
3964/* Opcode VEX.F2.0F 0xf2 - invalid */
3965
3966/* Opcode VEX.0F 0xf3 - invalid */
3967/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
3968FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
3969/* Opcode VEX.F2.0F 0xf3 - invalid */
3970
3971/* Opcode VEX.0F 0xf4 - invalid */
3972/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
3973FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
3974/* Opcode VEX.F2.0F 0xf4 - invalid */
3975
3976/* Opcode VEX.0F 0xf5 - invalid */
3977/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
3978FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
3979/* Opcode VEX.F2.0F 0xf5 - invalid */
3980
3981/* Opcode VEX.0F 0xf6 - invalid */
3982/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
3983FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
3984/* Opcode VEX.F2.0F 0xf6 - invalid */
3985
3986/* Opcode VEX.0F 0xf7 - invalid */
3987/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
3988FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
3989/* Opcode VEX.F2.0F 0xf7 - invalid */
3990
3991/* Opcode VEX.0F 0xf8 - invalid */
3992
3993
3994/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
3995FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
3996{
3997 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3998 IEMOPMEDIAF3_INIT_VARS( vpsubb);
3999 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4000}
4001
4002
4003/* Opcode VEX.F2.0F 0xf8 - invalid */
4004
4005/* Opcode VEX.0F 0xf9 - invalid */
4006
4007
4008/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
4009FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
4010{
4011 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4012 IEMOPMEDIAF3_INIT_VARS( vpsubw);
4013 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4014}
4015
4016
4017/* Opcode VEX.F2.0F 0xf9 - invalid */
4018
4019/* Opcode VEX.0F 0xfa - invalid */
4020
4021
4022/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
4023FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
4024{
4025 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4026 IEMOPMEDIAF3_INIT_VARS( vpsubd);
4027 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4028}
4029
4030
4031/* Opcode VEX.F2.0F 0xfa - invalid */
4032
4033/* Opcode VEX.0F 0xfb - invalid */
4034
4035
4036/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
4037FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
4038{
4039 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4040 IEMOPMEDIAF3_INIT_VARS( vpsubq);
4041 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4042}
4043
4044
4045/* Opcode VEX.F2.0F 0xfb - invalid */
4046
4047/* Opcode VEX.0F 0xfc - invalid */
4048
4049
4050/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
4051FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
4052{
4053 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4054 IEMOPMEDIAF3_INIT_VARS( vpaddb);
4055 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4056}
4057
4058
4059/* Opcode VEX.F2.0F 0xfc - invalid */
4060
4061/* Opcode VEX.0F 0xfd - invalid */
4062
4063
4064/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
4065FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
4066{
4067 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4068 IEMOPMEDIAF3_INIT_VARS( vpaddw);
4069 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4070}
4071
4072
4073/* Opcode VEX.F2.0F 0xfd - invalid */
4074
4075/* Opcode VEX.0F 0xfe - invalid */
4076
4077
4078/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
4079FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
4080{
4081 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4082 IEMOPMEDIAF3_INIT_VARS( vpaddd);
4083 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4084}
4085
4086
4087/* Opcode VEX.F2.0F 0xfe - invalid */
4088
4089
4090/** Opcode **** 0x0f 0xff - UD0 */
4091FNIEMOP_DEF(iemOp_vud0)
4092{
4093 IEMOP_MNEMONIC(vud0, "vud0");
4094 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
4095 {
4096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
4097#ifndef TST_IEM_CHECK_MC
4098 RTGCPTR GCPtrEff;
4099 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
4100 if (rcStrict != VINF_SUCCESS)
4101 return rcStrict;
4102#endif
4103 IEMOP_HLP_DONE_DECODING();
4104 }
4105 return IEMOP_RAISE_INVALID_OPCODE();
4106}
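
/*
 * Background for the vendor check above: Intel CPUs decode the ModR/M byte
 * (including any effective address) for UD0 before raising #UD, while
 * other vendors raise #UD on the opcode alone, so the effective address
 * is only calculated when emulating an Intel CPU.
 */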
4107
4108
4109
4110/**
4111 * VEX opcode map \#1.
4112 *
4113 * @sa g_apfnTwoByteMap
4114 */
4115IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
4116{
4117    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
4118 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
4119 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
4120 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
4121 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
4122 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
4123 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
4124 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
4125 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
4126 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
4127 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
4128 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
4129 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
4130 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
4131 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
4132 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
4133 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
4134
4135 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
4136 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
4137 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
4138 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4139 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4140 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4141 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
4142 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4143 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
4144 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
4145 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
4146 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
4147 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
4148 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
4149 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
4150 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
4151
4152 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
4153 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
4154 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
4155 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
4156 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
4157 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
4158 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
4159 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
4160 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4161 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4162 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
4163 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4164 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
4165 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
4166 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4167 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4168
4169 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
4170 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
4171 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
4172 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
4173 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
4174 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
4175 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
4176 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
4177 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4178 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4179 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4180 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4181 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4182 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4183 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4184 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4185
4186 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
4187 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
4188 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
4189 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
4190 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
4191 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
4192 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
4193 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
4194 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
4195 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
4196 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
4197 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
4198 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
4199 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
4200 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
4201 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
4202
4203 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4204 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
4205 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4206 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4207 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4208 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4209 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4210 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4211 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
4212 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
4213 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
4214 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
4215 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
4216 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
4217 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
4218 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
4219
4220 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4221 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4222 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4223 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4224 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4225 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4226 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4227 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4228 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4229 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4230 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4231 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4232 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4233 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4234 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4235 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
4236
4237 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
4238 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4239 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4240 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4241 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4242 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4243 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4244 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4245 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
4246 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
4247 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
4248 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
4249 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
4250 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
4251 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
4252 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
4253
4254 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
4255 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
4256 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
4257 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
4258 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
4259 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
4260 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
4261 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
4262 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
4263 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
4264 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
4265 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
4266 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
4267 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
4268 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
4269 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
4270
4271 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
4272 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
4273 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
4274 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
4275 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
4276 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
4277 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
4278 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
4279 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
4280 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
4281 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
4282 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
4283 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
4284 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
4285 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
4286 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
4287
4288 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4289 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4290 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4291 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4292 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4293 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4294 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4295 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4296 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4297 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4298 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
4299 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
4300 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
4301 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
4302 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
4303 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
4304
4305 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4306 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4307 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4308 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4309 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4310 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4311 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4312 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4313 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4314 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4315 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
4316 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
4317 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
4318 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
4319 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
4320 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
4321
4322 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4323 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4324 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
4325 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4326 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4327 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4328    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4329 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4330 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4331 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4332 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
4333 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
4334 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
4335 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
4336 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
4337 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
4338
4339 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
4340 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4341 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4342 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4343 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4344 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4345 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4346 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4347 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4348 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4349 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4350 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4351 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4352 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4353 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4354 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4355
4356 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4357 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4358 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4359 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4360 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4361 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4362 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
4363 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4364 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4365 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4366 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4367 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4368 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4369 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4370 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4371 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4372
4373 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
4374 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4375 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4376 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4377 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4378 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4379 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4380 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4381 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4382 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4383 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4384 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4385 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4386 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4387 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4388 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
4389};
4390AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
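
/*
 * Lookup sketch (assumed, mirroring the group 15 dispatch further up): the
 * decoder indexes this map as
 *      g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]
 * with idxPrefix encoding none/066h/0f3h/0f2h as 0..3, hence the
 * 256 * 4 == 1024 entry assertion above.
 */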
4391/** @} */
4392