VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@95509

Last change on this file: revision 95509, checked in by vboxsync, 2 years ago

VMM/IEM: Implemented vpunpcklbw, vpunpcklwd, vpunpckldq, vpunpcklqdq, vpunpckhbw, vpunpckhwd, vpunpckhdq, vpunpckhqdq and fixed the corresponding SSE and MMX instruction. bugref:9898

1/* $Id: IEMAllInstructionsVexMap1.cpp.h 95509 2022-07-04 22:53:58Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name VEX Opcode Map 1
23 * @{
24 */
25
26/**
27 * Common worker for AVX2 instructions on the forms:
28 * - vpxxx xmm0, xmm1, xmm2/mem128
29 * - vpxxx ymm0, ymm1, ymm2/mem256
30 *
31 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
32 */
33FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
34{
35 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
36 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
37 {
38 /*
39 * Register, register.
40 */
41 if (pVCpu->iem.s.uVexLength)
42 {
43 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
44 IEM_MC_BEGIN(4, 3);
45 IEM_MC_LOCAL(RTUINT256U, uDst);
46 IEM_MC_LOCAL(RTUINT256U, uSrc1);
47 IEM_MC_LOCAL(RTUINT256U, uSrc2);
48 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
49 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
50 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
51 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
52 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
53 IEM_MC_PREPARE_AVX_USAGE();
54 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
55 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
56 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
57 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
58 IEM_MC_ADVANCE_RIP();
59 IEM_MC_END();
60 }
61 else
62 {
63 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
64 IEM_MC_BEGIN(4, 0);
65 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
66 IEM_MC_ARG(PRTUINT128U, puDst, 1);
67 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
68 IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
69 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
70 IEM_MC_PREPARE_AVX_USAGE();
71 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
72 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
73 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
74 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
75 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
76 IEM_MC_ADVANCE_RIP();
77 IEM_MC_END();
78 }
79 }
80 else
81 {
82 /*
83 * Register, memory.
84 */
85 if (pVCpu->iem.s.uVexLength)
86 {
87 IEM_MC_BEGIN(4, 4);
88 IEM_MC_LOCAL(RTUINT256U, uDst);
89 IEM_MC_LOCAL(RTUINT256U, uSrc1);
90 IEM_MC_LOCAL(RTUINT256U, uSrc2);
91 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
92 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
93 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
94 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
95 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
96
97 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
98 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
99 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
100 IEM_MC_PREPARE_AVX_USAGE();
101
102 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
103 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
104 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
105 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
106
107 IEM_MC_ADVANCE_RIP();
108 IEM_MC_END();
109 }
110 else
111 {
112 IEM_MC_BEGIN(4, 2);
113 IEM_MC_LOCAL(RTUINT128U, uSrc2);
114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
115 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
116 IEM_MC_ARG(PRTUINT128U, puDst, 1);
117 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
118 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);
119
120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
121 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
122 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
123 IEM_MC_PREPARE_AVX_USAGE();
124
125 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
126 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
127 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
128 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
129 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
130
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 }
134 }
135 return VINF_SUCCESS;
136}
137
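/*
 * Illustrative wiring sketch (not part of the original file): an opcode
 * handler for a two-source AVX/AVX2 packed operation would typically just
 * establish the mnemonic and forward its IEMOPMEDIAF3 implementation table
 * to the worker above, which then selects pfnU128 or pfnU256 based on VEX.L.
 * Handler and table names below are assumptions for the example only:
 *
 *     FNIEMOP_DEF(iemOp_vpxxx_Vx_Hx_Wx)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VPXXX, vpxxx, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, &s_ImplVpxxx); // s_ImplVpxxx: assumed PCIEMOPMEDIAF3 table
 *     }
 */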
138
139/**
140 * Common worker for AVX2 instructions on the forms:
141 * - vpxxx xmm0, xmm1, xmm2/mem128
142 * - vpxxx ymm0, ymm1, ymm2/mem256
143 *
144 * Takes function table for function w/o implicit state parameter.
145 *
146 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
147 */
148FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
149{
150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
151 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
152 {
153 /*
154 * Register, register.
155 */
156 if (pVCpu->iem.s.uVexLength)
157 {
158 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
159 IEM_MC_BEGIN(3, 3);
160 IEM_MC_LOCAL(RTUINT256U, uDst);
161 IEM_MC_LOCAL(RTUINT256U, uSrc1);
162 IEM_MC_LOCAL(RTUINT256U, uSrc2);
163 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
164 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
165 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
166 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
167 IEM_MC_PREPARE_AVX_USAGE();
168 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
169 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
171 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
172 IEM_MC_ADVANCE_RIP();
173 IEM_MC_END();
174 }
175 else
176 {
177 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
178 IEM_MC_BEGIN(3, 0);
179 IEM_MC_ARG(PRTUINT128U, puDst, 0);
180 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
181 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
182 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
183 IEM_MC_PREPARE_AVX_USAGE();
184 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
185 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
186 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
188 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
189 IEM_MC_ADVANCE_RIP();
190 IEM_MC_END();
191 }
192 }
193 else
194 {
195 /*
196 * Register, memory.
197 */
198 if (pVCpu->iem.s.uVexLength)
199 {
200 IEM_MC_BEGIN(3, 4);
201 IEM_MC_LOCAL(RTUINT256U, uDst);
202 IEM_MC_LOCAL(RTUINT256U, uSrc1);
203 IEM_MC_LOCAL(RTUINT256U, uSrc2);
204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
205 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
206 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
207 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
208
209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
210 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
211 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
212 IEM_MC_PREPARE_AVX_USAGE();
213
214 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
215 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
216 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
217 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
218
219 IEM_MC_ADVANCE_RIP();
220 IEM_MC_END();
221 }
222 else
223 {
224 IEM_MC_BEGIN(3, 2);
225 IEM_MC_LOCAL(RTUINT128U, uSrc2);
226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
227 IEM_MC_ARG(PRTUINT128U, puDst, 0);
228 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
229 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
230
231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
232 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
233 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
234 IEM_MC_PREPARE_AVX_USAGE();
235
236 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
237 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
238 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
239 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
240 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
241
242 IEM_MC_ADVANCE_RIP();
243 IEM_MC_END();
244 }
245 }
246 return VINF_SUCCESS;
247}
248
249
250/**
251 * Common worker for AVX2 instructions on the forms:
252 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
253 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
254 *
255 * The 128-bit memory version of this instruction may elect to skip fetching the
256 * lower 64 bits of the operand. We, however, do not.
257 *
258 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
259 */
260FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
261{
262 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
263}
264
265
266/**
267 * Common worker for AVX2 instructions on the forms:
268 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
269 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
270 *
271 * The 128-bit memory version of this instruction may elect to skip fetching the
272 * higher 64 bits of the operand. We, however, do not.
273 *
274 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
275 */
276FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
277{
278 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
279}
280
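/*
 * Illustrative sketch (not part of the original file): the vpunpckl / vpunpckh
 * handlers mentioned in the change log are expected to route through the
 * _LowSrc / _HighSrc forwarders above with an IEMOPMEDIAOPTF3 table.  Handler
 * and table names are assumptions for the example only:
 *
 *     FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, &s_ImplVpunpcklbw); // assumed PCIEMOPMEDIAOPTF3 table
 *     }
 */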
281
282
283/* Opcode VEX.0F 0x00 - invalid */
284/* Opcode VEX.0F 0x01 - invalid */
285/* Opcode VEX.0F 0x02 - invalid */
286/* Opcode VEX.0F 0x03 - invalid */
287/* Opcode VEX.0F 0x04 - invalid */
288/* Opcode VEX.0F 0x05 - invalid */
289/* Opcode VEX.0F 0x06 - invalid */
290/* Opcode VEX.0F 0x07 - invalid */
291/* Opcode VEX.0F 0x08 - invalid */
292/* Opcode VEX.0F 0x09 - invalid */
293/* Opcode VEX.0F 0x0a - invalid */
294
295/** Opcode VEX.0F 0x0b. */
296FNIEMOP_DEF(iemOp_vud2)
297{
298 IEMOP_MNEMONIC(vud2, "vud2");
299 return IEMOP_RAISE_INVALID_OPCODE();
300}
301
302/* Opcode VEX.0F 0x0c - invalid */
303/* Opcode VEX.0F 0x0d - invalid */
304/* Opcode VEX.0F 0x0e - invalid */
305/* Opcode VEX.0F 0x0f - invalid */
306
307
308/**
309 * @opcode 0x10
310 * @oppfx none
311 * @opcpuid avx
312 * @opgroup og_avx_simdfp_datamove
313 * @opxcpttype 4UA
314 * @optest op1=1 op2=2 -> op1=2
315 * @optest op1=0 op2=-22 -> op1=-22
316 */
317FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
318{
319 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
320 Assert(pVCpu->iem.s.uVexLength <= 1);
321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
322 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
323 {
324 /*
325 * Register, register.
326 */
327 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
328 IEM_MC_BEGIN(0, 0);
329 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
330 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
331 if (pVCpu->iem.s.uVexLength == 0)
332 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
333 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
334 else
335 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
336 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
337 IEM_MC_ADVANCE_RIP();
338 IEM_MC_END();
339 }
340 else if (pVCpu->iem.s.uVexLength == 0)
341 {
342 /*
343 * 128-bit: Register, Memory
344 */
345 IEM_MC_BEGIN(0, 2);
346 IEM_MC_LOCAL(RTUINT128U, uSrc);
347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
348
349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
350 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
352 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
353
354 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
355 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
356
357 IEM_MC_ADVANCE_RIP();
358 IEM_MC_END();
359 }
360 else
361 {
362 /*
363 * 256-bit: Register, Memory
364 */
365 IEM_MC_BEGIN(0, 2);
366 IEM_MC_LOCAL(RTUINT256U, uSrc);
367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
368
369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
370 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
371 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
372 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
373
374 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
375 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
376
377 IEM_MC_ADVANCE_RIP();
378 IEM_MC_END();
379 }
380 return VINF_SUCCESS;
381}
382
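/*
 * Semantics sketch (illustrative only): the *_ZX_VLMAX copy/store operations
 * used above implement the VEX rule that writing a 128-bit destination zeroes
 * the register up to the maximum vector length.  Conceptually, for the
 * VEX.128 register form of vmovups:
 *
 *     dst.au64[0] = src.au64[0];
 *     dst.au64[1] = src.au64[1];
 *     dst.au64[2] = 0;             // bits 255:128 are cleared
 *     dst.au64[3] = 0;
 */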
383
384/**
385 * @opcode 0x10
386 * @oppfx 0x66
387 * @opcpuid avx
388 * @opgroup og_avx_simdfp_datamove
389 * @opxcpttype 4UA
390 * @optest op1=1 op2=2 -> op1=2
391 * @optest op1=0 op2=-22 -> op1=-22
392 */
393FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
394{
395 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
396 Assert(pVCpu->iem.s.uVexLength <= 1);
397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
398 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
399 {
400 /*
401 * Register, register.
402 */
403 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
404 IEM_MC_BEGIN(0, 0);
405 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
406 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
407 if (pVCpu->iem.s.uVexLength == 0)
408 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
409 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
410 else
411 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
412 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
413 IEM_MC_ADVANCE_RIP();
414 IEM_MC_END();
415 }
416 else if (pVCpu->iem.s.uVexLength == 0)
417 {
418 /*
419 * 128-bit: Register, memory.
420 */
421 IEM_MC_BEGIN(0, 2);
422 IEM_MC_LOCAL(RTUINT128U, uSrc);
423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
424
425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
426 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
427 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
428 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
429
430 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
431 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
432
433 IEM_MC_ADVANCE_RIP();
434 IEM_MC_END();
435 }
436 else
437 {
438 /*
439 * 256-bit: Register, memory.
440 */
441 IEM_MC_BEGIN(0, 2);
442 IEM_MC_LOCAL(RTUINT256U, uSrc);
443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
444
445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
446 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
447 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
448 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
449
450 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
451 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
452
453 IEM_MC_ADVANCE_RIP();
454 IEM_MC_END();
455 }
456 return VINF_SUCCESS;
457}
458
459
460FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
461{
462 Assert(pVCpu->iem.s.uVexLength <= 1);
463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
464 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
465 {
466 /**
467 * @opcode 0x10
468 * @oppfx 0xf3
469 * @opcodesub 11 mr/reg
470 * @opcpuid avx
471 * @opgroup og_avx_simdfp_datamerge
472 * @opxcpttype 5
473 * @optest op1=1 op2=0 op3=2 -> op1=2
474 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
475 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
476 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
477 * @note HssHi refers to bits 127:32.
478 */
479 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
480 IEMOP_HLP_DONE_VEX_DECODING();
481 IEM_MC_BEGIN(0, 0);
482
483 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
484 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
485 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
486 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U32*/,
487 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
488 IEM_MC_ADVANCE_RIP();
489 IEM_MC_END();
490 }
491 else
492 {
493 /**
494 * @opdone
495 * @opcode 0x10
496 * @oppfx 0xf3
497 * @opcodesub !11 mr/reg
498 * @opcpuid avx
499 * @opgroup og_avx_simdfp_datamove
500 * @opxcpttype 5
501 * @opfunction iemOp_vmovss_Vss_Hss_Wss
502 * @optest op1=1 op2=2 -> op1=2
503 * @optest op1=0 op2=-22 -> op1=-22
504 */
505 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
506 IEM_MC_BEGIN(0, 2);
507 IEM_MC_LOCAL(uint32_t, uSrc);
508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
509
510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
511 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
512 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
513 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
514
515 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
516 IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
517
518 IEM_MC_ADVANCE_RIP();
519 IEM_MC_END();
520 }
521
522 return VINF_SUCCESS;
523}
524
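/*
 * Architectural semantics sketch (illustrative only) for the register form of
 * vmovss handled above (IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX):
 *
 *     dst.au32[0] = Uss.au32[0];   // low dword from the r/m register
 *     dst.au32[1] = Hss.au32[1];   // bits 127:32 from the VEX.vvvv register
 *     dst.au32[2] = Hss.au32[2];
 *     dst.au32[3] = Hss.au32[3];
 *     // bits 255:128 of the destination YMM register are zeroed
 */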
525
526FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
527{
528 Assert(pVCpu->iem.s.uVexLength <= 1);
529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
530 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
531 {
532 /**
533 * @opcode 0x10
534 * @oppfx 0xf2
535 * @opcodesub 11 mr/reg
536 * @opcpuid avx
537 * @opgroup og_avx_simdfp_datamerge
538 * @opxcpttype 5
539 * @optest op1=1 op2=0 op3=2 -> op1=2
540 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
541 * @optest op1=3 op2=-1 op3=0x77 ->
542 * op1=0xffffffffffffffff0000000000000077
543 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
544 */
545 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
546 IEMOP_HLP_DONE_VEX_DECODING();
547 IEM_MC_BEGIN(0, 0);
548
549 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
550 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
551 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
552 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U64*/,
553 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
554 IEM_MC_ADVANCE_RIP();
555 IEM_MC_END();
556 }
557 else
558 {
559 /**
560 * @opdone
561 * @opcode 0x10
562 * @oppfx 0xf2
563 * @opcodesub !11 mr/reg
564 * @opcpuid avx
565 * @opgroup og_avx_simdfp_datamove
566 * @opxcpttype 5
567 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
568 * @optest op1=1 op2=2 -> op1=2
569 * @optest op1=0 op2=-22 -> op1=-22
570 */
571 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
572 IEM_MC_BEGIN(0, 2);
573 IEM_MC_LOCAL(uint64_t, uSrc);
574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
575
576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
577 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
578 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
579 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
580
581 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
582 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
583
584 IEM_MC_ADVANCE_RIP();
585 IEM_MC_END();
586 }
587
588 return VINF_SUCCESS;
589}
590
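/*
 * Decoding note (illustrative): register index expressions used throughout,
 * such as
 *
 *     ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg
 *
 * combine the 3-bit ModR/M reg field with the VEX.R/REX.R extension bit,
 * which IEM appears to keep pre-shifted in uRexReg (it is OR'ed in directly),
 * to form the full 0..15 XMM/YMM register number.
 */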
591
592/**
593 * @opcode 0x11
594 * @oppfx none
595 * @opcpuid avx
596 * @opgroup og_avx_simdfp_datamove
597 * @opxcpttype 4UA
598 * @optest op1=1 op2=2 -> op1=2
599 * @optest op1=0 op2=-22 -> op1=-22
600 */
601FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
602{
603 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
604 Assert(pVCpu->iem.s.uVexLength <= 1);
605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
606 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
607 {
608 /*
609 * Register, register.
610 */
611 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
612 IEM_MC_BEGIN(0, 0);
613 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
614 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
615 if (pVCpu->iem.s.uVexLength == 0)
616 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
617 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
618 else
619 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
620 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
621 IEM_MC_ADVANCE_RIP();
622 IEM_MC_END();
623 }
624 else if (pVCpu->iem.s.uVexLength == 0)
625 {
626 /*
627 * 128-bit: Memory, register.
628 */
629 IEM_MC_BEGIN(0, 2);
630 IEM_MC_LOCAL(RTUINT128U, uSrc);
631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
632
633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
634 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
635 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
636 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
637
638 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
639 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
640
641 IEM_MC_ADVANCE_RIP();
642 IEM_MC_END();
643 }
644 else
645 {
646 /*
647 * 256-bit: Memory, register.
648 */
649 IEM_MC_BEGIN(0, 2);
650 IEM_MC_LOCAL(RTUINT256U, uSrc);
651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
652
653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
654 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
655 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
656 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
657
658 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
659 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
660
661 IEM_MC_ADVANCE_RIP();
662 IEM_MC_END();
663 }
664 return VINF_SUCCESS;
665}
666
667
668/**
669 * @opcode 0x11
670 * @oppfx 0x66
671 * @opcpuid avx
672 * @opgroup og_avx_simdfp_datamove
673 * @opxcpttype 4UA
674 * @optest op1=1 op2=2 -> op1=2
675 * @optest op1=0 op2=-22 -> op1=-22
676 */
677FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
678{
679 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
680 Assert(pVCpu->iem.s.uVexLength <= 1);
681 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
683 {
684 /*
685 * Register, register.
686 */
687 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
688 IEM_MC_BEGIN(0, 0);
689 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
690 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
691 if (pVCpu->iem.s.uVexLength == 0)
692 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
693 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
694 else
695 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
696 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
697 IEM_MC_ADVANCE_RIP();
698 IEM_MC_END();
699 }
700 else if (pVCpu->iem.s.uVexLength == 0)
701 {
702 /*
703 * 128-bit: Memory, register.
704 */
705 IEM_MC_BEGIN(0, 2);
706 IEM_MC_LOCAL(RTUINT128U, uSrc);
707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
708
709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
710 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
712 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
713
714 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
715 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
716
717 IEM_MC_ADVANCE_RIP();
718 IEM_MC_END();
719 }
720 else
721 {
722 /*
723 * 256-bit: Memory, register.
724 */
725 IEM_MC_BEGIN(0, 2);
726 IEM_MC_LOCAL(RTUINT256U, uSrc);
727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
728
729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
730 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
731 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
732 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
733
734 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
735 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
736
737 IEM_MC_ADVANCE_RIP();
738 IEM_MC_END();
739 }
740 return VINF_SUCCESS;
741}
742
743
744FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
745{
746 Assert(pVCpu->iem.s.uVexLength <= 1);
747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
748 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
749 {
750 /**
751 * @opcode 0x11
752 * @oppfx 0xf3
753 * @opcodesub 11 mr/reg
754 * @opcpuid avx
755 * @opgroup og_avx_simdfp_datamerge
756 * @opxcpttype 5
757 * @optest op1=1 op2=0 op3=2 -> op1=2
758 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
759 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
760 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
761 */
762 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
763 IEMOP_HLP_DONE_VEX_DECODING();
764 IEM_MC_BEGIN(0, 0);
765
766 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
767 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
768 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U32*/,
769 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
770 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
771 IEM_MC_ADVANCE_RIP();
772 IEM_MC_END();
773 }
774 else
775 {
776 /**
777 * @opdone
778 * @opcode 0x11
779 * @oppfx 0xf3
780 * @opcodesub !11 mr/reg
781 * @opcpuid avx
782 * @opgroup og_avx_simdfp_datamove
783 * @opxcpttype 5
784 * @opfunction iemOp_vmovss_Wss_Hss_Vss
785 * @optest op1=1 op2=2 -> op1=2
786 * @optest op1=0 op2=-22 -> op1=-22
787 */
788 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
789 IEM_MC_BEGIN(0, 2);
790 IEM_MC_LOCAL(uint32_t, uSrc);
791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
792
793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
794 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
795 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
796 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
797
798 IEM_MC_FETCH_YREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
799 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
800
801 IEM_MC_ADVANCE_RIP();
802 IEM_MC_END();
803 }
804
805 return VINF_SUCCESS;
806}
807
808
809FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
810{
811 Assert(pVCpu->iem.s.uVexLength <= 1);
812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
813 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
814 {
815 /**
816 * @opcode 0x11
817 * @oppfx 0xf2
818 * @opcodesub 11 mr/reg
819 * @opcpuid avx
820 * @opgroup og_avx_simdfp_datamerge
821 * @opxcpttype 5
822 * @optest op1=1 op2=0 op3=2 -> op1=2
823 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
824 * @optest op1=3 op2=-1 op3=0x77 ->
825 * op1=0xffffffffffffffff0000000000000077
826 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
827 */
828 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
829 IEMOP_HLP_DONE_VEX_DECODING();
830 IEM_MC_BEGIN(0, 0);
831
832 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
833 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
834 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
835 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
836 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
837 IEM_MC_ADVANCE_RIP();
838 IEM_MC_END();
839 }
840 else
841 {
842 /**
843 * @opdone
844 * @opcode 0x11
845 * @oppfx 0xf2
846 * @opcodesub !11 mr/reg
847 * @opcpuid avx
848 * @opgroup og_avx_simdfp_datamove
849 * @opxcpttype 5
850 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
851 * @optest op1=1 op2=2 -> op1=2
852 * @optest op1=0 op2=-22 -> op1=-22
853 */
854 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
855 IEM_MC_BEGIN(0, 2);
856 IEM_MC_LOCAL(uint64_t, uSrc);
857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
858
859 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
860 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
861 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
862 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
863
864 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
865 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
866
867 IEM_MC_ADVANCE_RIP();
868 IEM_MC_END();
869 }
870
871 return VINF_SUCCESS;
872}
873
874
875FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
876{
877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
879 {
880 /**
881 * @opcode 0x12
882 * @opcodesub 11 mr/reg
883 * @oppfx none
884 * @opcpuid avx
885 * @opgroup og_avx_simdfp_datamerge
886 * @opxcpttype 7LZ
887 * @optest op2=0x2200220122022203
888 * op3=0x3304330533063307
889 * -> op1=0x22002201220222033304330533063307
890 * @optest op2=-1 op3=-42 -> op1=-42
891 * @note op3 and op2 are only the 8-byte high XMM register halves.
892 */
893 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
894
895 IEMOP_HLP_DONE_VEX_DECODING_L0();
896 IEM_MC_BEGIN(0, 0);
897
898 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
899 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
900 IEM_MC_MERGE_YREG_U64HI_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
901 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
902 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
903
904 IEM_MC_ADVANCE_RIP();
905 IEM_MC_END();
906 }
907 else
908 {
909 /**
910 * @opdone
911 * @opcode 0x12
912 * @opcodesub !11 mr/reg
913 * @oppfx none
914 * @opcpuid avx
915 * @opgroup og_avx_simdfp_datamove
916 * @opxcpttype 5LZ
917 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
918 * @optest op1=1 op2=0 op3=0 -> op1=0
919 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
920 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
921 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
922 */
923 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
924
925 IEM_MC_BEGIN(0, 2);
926 IEM_MC_LOCAL(uint64_t, uSrc);
927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
928
929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
930 IEMOP_HLP_DONE_VEX_DECODING_L0();
931 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
932 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
933
934 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
935 IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
936 uSrc,
937 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
938
939 IEM_MC_ADVANCE_RIP();
940 IEM_MC_END();
941 }
942 return VINF_SUCCESS;
943}
944
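/*
 * Architectural semantics sketch (illustrative only) for the two encodings
 * handled above:
 *
 *     vmovhlps (register form):
 *         dst.au64[0] = Uq.au64[1];    // high qword of the r/m register
 *         dst.au64[1] = Hq.au64[1];    // high qword of the VEX.vvvv register
 *
 *     vmovlps (memory form):
 *         dst.au64[0] = uSrc;          // 64-bit memory operand
 *         dst.au64[1] = Hq.au64[1];
 *
 * In both cases bits 255:128 of the destination are zeroed (ZX_VLMAX).
 */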
945
946/**
947 * @opcode 0x12
948 * @opcodesub !11 mr/reg
949 * @oppfx 0x66
950 * @opcpuid avx
951 * @opgroup og_avx_pcksclr_datamerge
952 * @opxcpttype 5LZ
953 * @optest op2=0 op3=2 -> op1=2
954 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
955 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
956 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
957 */
958FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
959{
960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
961 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
962 {
963 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
964
965 IEM_MC_BEGIN(0, 2);
966 IEM_MC_LOCAL(uint64_t, uSrc);
967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
968
969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
970 IEMOP_HLP_DONE_VEX_DECODING_L0();
971 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
972 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
973
974 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
975 IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
976 uSrc,
977 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
978
979 IEM_MC_ADVANCE_RIP();
980 IEM_MC_END();
981 return VINF_SUCCESS;
982 }
983
984 /**
985 * @opdone
986 * @opmnemonic udvex660f12m3
987 * @opcode 0x12
988 * @opcodesub 11 mr/reg
989 * @oppfx 0x66
990 * @opunused immediate
991 * @opcpuid avx
992 * @optest ->
993 */
994 return IEMOP_RAISE_INVALID_OPCODE();
995}
996
997
998/**
999 * @opcode 0x12
1000 * @oppfx 0xf3
1001 * @opcpuid avx
1002 * @opgroup og_avx_pcksclr_datamove
1003 * @opxcpttype 4
1004 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1005 * -> op1=0x00000002000000020000000100000001
1006 * @optest vex.l==1 /
1007 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1008 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1009 */
1010FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1011{
1012 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1013 Assert(pVCpu->iem.s.uVexLength <= 1);
1014 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1015 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1016 {
1017 /*
1018 * Register, register.
1019 */
1020 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1021 if (pVCpu->iem.s.uVexLength == 0)
1022 {
1023 IEM_MC_BEGIN(2, 0);
1024 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1025 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1026
1027 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1028 IEM_MC_PREPARE_AVX_USAGE();
1029
1030 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1031 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1032 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1033 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1034
1035 IEM_MC_ADVANCE_RIP();
1036 IEM_MC_END();
1037 }
1038 else
1039 {
1040 IEM_MC_BEGIN(3, 0);
1041 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1042 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
1043 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2);
1044
1045 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1046 IEM_MC_PREPARE_AVX_USAGE();
1047 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);
1048
1049 IEM_MC_ADVANCE_RIP();
1050 IEM_MC_END();
1051 }
1052 }
1053 else
1054 {
1055 /*
1056 * Register, memory.
1057 */
1058 if (pVCpu->iem.s.uVexLength == 0)
1059 {
1060 IEM_MC_BEGIN(2, 2);
1061 IEM_MC_LOCAL(RTUINT128U, uSrc);
1062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1063 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1064 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1065
1066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1067 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1068 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1069 IEM_MC_PREPARE_AVX_USAGE();
1070
1071 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1072 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1073 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1074 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1075
1076 IEM_MC_ADVANCE_RIP();
1077 IEM_MC_END();
1078 }
1079 else
1080 {
1081 IEM_MC_BEGIN(3, 2);
1082 IEM_MC_LOCAL(RTUINT256U, uSrc);
1083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1084 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1085 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
1086 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1087
1088 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1089 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1090 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1091 IEM_MC_PREPARE_AVX_USAGE();
1092
1093 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1094 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);
1095
1096 IEM_MC_ADVANCE_RIP();
1097 IEM_MC_END();
1098 }
1099 }
1100 return VINF_SUCCESS;
1101}
1102
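/*
 * Architectural semantics sketch (illustrative only) for vmovsldup: each even
 * (low) source dword is duplicated into the adjacent odd dword.  Per 128-bit
 * lane:
 *
 *     dst.au32[0] = src.au32[0];
 *     dst.au32[1] = src.au32[0];
 *     dst.au32[2] = src.au32[2];
 *     dst.au32[3] = src.au32[2];
 *
 * The VEX.256 form applies the same pattern to the upper lane; the VEX.128
 * form zeroes bits 255:128 of the destination.
 */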
1103
1104/**
1105 * @opcode 0x12
1106 * @oppfx 0xf2
1107 * @opcpuid avx
1108 * @opgroup og_avx_pcksclr_datamove
1109 * @opxcpttype 5
1110 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1111 * -> op1=0x22222222111111112222222211111111
1112 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1113 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1114 */
1115FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1116{
1117 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1120 {
1121 /*
1122 * Register, register.
1123 */
1124 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1125 if (pVCpu->iem.s.uVexLength == 0)
1126 {
1127 IEM_MC_BEGIN(2, 0);
1128 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1129 IEM_MC_ARG(uint64_t, uSrc, 1);
1130
1131 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1132 IEM_MC_PREPARE_AVX_USAGE();
1133
1134 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1135 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1136 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1137 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1138
1139 IEM_MC_ADVANCE_RIP();
1140 IEM_MC_END();
1141 }
1142 else
1143 {
1144 IEM_MC_BEGIN(3, 0);
1145 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1146 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
1147 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2);
1148
1149 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1150 IEM_MC_PREPARE_AVX_USAGE();
1151 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);
1152
1153 IEM_MC_ADVANCE_RIP();
1154 IEM_MC_END();
1155 }
1156 }
1157 else
1158 {
1159 /*
1160 * Register, memory.
1161 */
1162 if (pVCpu->iem.s.uVexLength == 0)
1163 {
1164 IEM_MC_BEGIN(2, 2);
1165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1166 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1167 IEM_MC_ARG(uint64_t, uSrc, 1);
1168
1169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1170 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1171 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1172 IEM_MC_PREPARE_AVX_USAGE();
1173
1174 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1175 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1176 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1177 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1178
1179 IEM_MC_ADVANCE_RIP();
1180 IEM_MC_END();
1181 }
1182 else
1183 {
1184 IEM_MC_BEGIN(3, 2);
1185 IEM_MC_LOCAL(RTUINT256U, uSrc);
1186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1187 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1188 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
1189 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1190
1191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1192 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1193 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1194 IEM_MC_PREPARE_AVX_USAGE();
1195
1196 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1197 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);
1198
1199 IEM_MC_ADVANCE_RIP();
1200 IEM_MC_END();
1201 }
1202 }
1203 return VINF_SUCCESS;
1204}
1205
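/*
 * Architectural semantics sketch (illustrative only) for vmovddup: the low
 * qword of each 128-bit source lane is duplicated, which is why the VEX.128
 * memory form above only needs a 64-bit fetch (IEM_MC_FETCH_MEM_U64):
 *
 *     dst.au64[0] = src.au64[0];
 *     dst.au64[1] = src.au64[0];
 *
 * The VEX.128 form zeroes bits 255:128 of the destination.
 */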
1206
1207/**
1208 * @opcode 0x13
1209 * @opcodesub !11 mr/reg
1210 * @oppfx none
1211 * @opcpuid avx
1212 * @opgroup og_avx_simdfp_datamove
1213 * @opxcpttype 5
1214 * @optest op1=1 op2=2 -> op1=2
1215 * @optest op1=0 op2=-42 -> op1=-42
1216 */
1217FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1218{
1219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1220 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1221 {
1222 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1223
1224 IEM_MC_BEGIN(0, 2);
1225 IEM_MC_LOCAL(uint64_t, uSrc);
1226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1227
1228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1229 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1232
1233 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1234 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1235
1236 IEM_MC_ADVANCE_RIP();
1237 IEM_MC_END();
1238 return VINF_SUCCESS;
1239 }
1240
1241 /**
1242 * @opdone
1243 * @opmnemonic udvex0f13m3
1244 * @opcode 0x13
1245 * @opcodesub 11 mr/reg
1246 * @oppfx none
1247 * @opunused immediate
1248 * @opcpuid avx
1249 * @optest ->
1250 */
1251 return IEMOP_RAISE_INVALID_OPCODE();
1252}
1253
1254
1255/**
1256 * @opcode 0x13
1257 * @opcodesub !11 mr/reg
1258 * @oppfx 0x66
1259 * @opcpuid avx
1260 * @opgroup og_avx_pcksclr_datamove
1261 * @opxcpttype 5
1262 * @optest op1=1 op2=2 -> op1=2
1263 * @optest op1=0 op2=-42 -> op1=-42
1264 */
1265FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1266{
1267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1268 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1269 {
1270 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1271 IEM_MC_BEGIN(0, 2);
1272 IEM_MC_LOCAL(uint64_t, uSrc);
1273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1274
1275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1276 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1277 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1278 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1279
1280 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1281 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1282
1283 IEM_MC_ADVANCE_RIP();
1284 IEM_MC_END();
1285 return VINF_SUCCESS;
1286 }
1287
1288 /**
1289 * @opdone
1290 * @opmnemonic udvex660f13m3
1291 * @opcode 0x13
1292 * @opcodesub 11 mr/reg
1293 * @oppfx 0x66
1294 * @opunused immediate
1295 * @opcpuid avx
1296 * @optest ->
1297 */
1298 return IEMOP_RAISE_INVALID_OPCODE();
1299}
1300
1301/* Opcode VEX.F3.0F 0x13 - invalid */
1302/* Opcode VEX.F2.0F 0x13 - invalid */
1303
1304/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1305FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1306/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1307FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1308/* Opcode VEX.F3.0F 0x14 - invalid */
1309/* Opcode VEX.F2.0F 0x14 - invalid */
1310/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1311FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1312/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1313FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1314/* Opcode VEX.F3.0F 0x15 - invalid */
1315/* Opcode VEX.F2.0F 0x15 - invalid */
1316/** Opcode VEX.0F 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1317FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1318/** Opcode VEX.66.0F 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1319FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1320/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1321FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1322/* Opcode VEX.F2.0F 0x16 - invalid */
1323/** Opcode VEX.0F 0x17 - vmovhpsv1 Mq, Vq */
1324FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1325/** Opcode VEX.66.0F 0x17 - vmovhpdv1 Mq, Vq */
1326FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1327/* Opcode VEX.F3.0F 0x17 - invalid */
1328/* Opcode VEX.F2.0F 0x17 - invalid */
1329
1330
1331/* Opcode VEX.0F 0x18 - invalid */
1332/* Opcode VEX.0F 0x19 - invalid */
1333/* Opcode VEX.0F 0x1a - invalid */
1334/* Opcode VEX.0F 0x1b - invalid */
1335/* Opcode VEX.0F 0x1c - invalid */
1336/* Opcode VEX.0F 0x1d - invalid */
1337/* Opcode VEX.0F 0x1e - invalid */
1338/* Opcode VEX.0F 0x1f - invalid */
1339
1340/* Opcode VEX.0F 0x20 - invalid */
1341/* Opcode VEX.0F 0x21 - invalid */
1342/* Opcode VEX.0F 0x22 - invalid */
1343/* Opcode VEX.0F 0x23 - invalid */
1344/* Opcode VEX.0F 0x24 - invalid */
1345/* Opcode VEX.0F 0x25 - invalid */
1346/* Opcode VEX.0F 0x26 - invalid */
1347/* Opcode VEX.0F 0x27 - invalid */
1348
1349/**
1350 * @opcode 0x28
1351 * @oppfx none
1352 * @opcpuid avx
1353 * @opgroup og_avx_pcksclr_datamove
1354 * @opxcpttype 1
1355 * @optest op1=1 op2=2 -> op1=2
1356 * @optest op1=0 op2=-42 -> op1=-42
1357 * @note Almost identical to vmovapd.
1358 */
1359FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1360{
1361 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1363 Assert(pVCpu->iem.s.uVexLength <= 1);
1364 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1365 {
1366 /*
1367 * Register, register.
1368 */
1369 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1370 IEM_MC_BEGIN(1, 0);
1371
1372 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1373 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1374 if (pVCpu->iem.s.uVexLength == 0)
1375 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1376 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1377 else
1378 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1379 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1380 IEM_MC_ADVANCE_RIP();
1381 IEM_MC_END();
1382 }
1383 else
1384 {
1385 /*
1386 * Register, memory.
1387 */
1388 if (pVCpu->iem.s.uVexLength == 0)
1389 {
1390 IEM_MC_BEGIN(0, 2);
1391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1392 IEM_MC_LOCAL(RTUINT128U, uSrc);
1393
1394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1395 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1396 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1397 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1398
1399 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1400 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1401
1402 IEM_MC_ADVANCE_RIP();
1403 IEM_MC_END();
1404 }
1405 else
1406 {
1407 IEM_MC_BEGIN(0, 2);
1408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1409 IEM_MC_LOCAL(RTUINT256U, uSrc);
1410
1411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1412 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1413 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1414 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1415
1416 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1417 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1418
1419 IEM_MC_ADVANCE_RIP();
1420 IEM_MC_END();
1421 }
1422 }
1423 return VINF_SUCCESS;
1424}
1425
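/*
 * Alignment note with a worked example (illustrative only): vmovaps uses the
 * alignment-checking fetches above (IEM_MC_FETCH_MEM_U128_ALIGN_SSE /
 * IEM_MC_FETCH_MEM_U256_ALIGN_AVX), whereas vmovups uses the plain ones.
 * Architecturally, e.g.:
 *
 *     vmovaps ymm0, [rsp+1]   ; 32-byte operand not 32-byte aligned -> #GP(0)
 *     vmovups ymm0, [rsp+1]   ; the same access is allowed
 */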
1426
1427/**
1428 * @opcode 0x28
1429 * @oppfx 66
1430 * @opcpuid avx
1431 * @opgroup og_avx_pcksclr_datamove
1432 * @opxcpttype 1
1433 * @optest op1=1 op2=2 -> op1=2
1434 * @optest op1=0 op2=-42 -> op1=-42
1435 * @note Almost identical to vmovaps
1436 */
1437FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1438{
1439 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1440 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1441 Assert(pVCpu->iem.s.uVexLength <= 1);
1442 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1443 {
1444 /*
1445 * Register, register.
1446 */
1447 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1448 IEM_MC_BEGIN(1, 0);
1449
1450 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1451 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1452 if (pVCpu->iem.s.uVexLength == 0)
1453 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1454 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1455 else
1456 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1457 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1458 IEM_MC_ADVANCE_RIP();
1459 IEM_MC_END();
1460 }
1461 else
1462 {
1463 /*
1464 * Register, memory.
1465 */
1466 if (pVCpu->iem.s.uVexLength == 0)
1467 {
1468 IEM_MC_BEGIN(0, 2);
1469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1470 IEM_MC_LOCAL(RTUINT128U, uSrc);
1471
1472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1473 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1474 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1475 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1476
1477 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1478 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1479
1480 IEM_MC_ADVANCE_RIP();
1481 IEM_MC_END();
1482 }
1483 else
1484 {
1485 IEM_MC_BEGIN(0, 2);
1486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1487 IEM_MC_LOCAL(RTUINT256U, uSrc);
1488
1489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1490 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1491 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1492 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1493
1494 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1495 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1496
1497 IEM_MC_ADVANCE_RIP();
1498 IEM_MC_END();
1499 }
1500 }
1501 return VINF_SUCCESS;
1502}
1503
1504/**
1505 * @opmnemonic udvexf30f28
1506 * @opcode 0x28
1507 * @oppfx 0xf3
1508 * @opunused vex.modrm
1509 * @opcpuid avx
1510 * @optest ->
1511 * @opdone
1512 */
1513
1514/**
1515 * @opmnemonic udvexf20f28
1516 * @opcode 0x28
1517 * @oppfx 0xf2
1518 * @opunused vex.modrm
1519 * @opcpuid avx
1520 * @optest ->
1521 * @opdone
1522 */
1523
1524/**
1525 * @opcode 0x29
1526 * @oppfx none
1527 * @opcpuid avx
1528 * @opgroup og_avx_pcksclr_datamove
1529 * @opxcpttype 1
1530 * @optest op1=1 op2=2 -> op1=2
1531 * @optest op1=0 op2=-42 -> op1=-42
1532 * @note Almost identical to vmovapd.
1533 */
1534FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1535{
1536 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1538 Assert(pVCpu->iem.s.uVexLength <= 1);
1539 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1540 {
1541 /*
1542 * Register, register.
1543 */
1544 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1545 IEM_MC_BEGIN(1, 0);
1546
1547 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1548 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1549 if (pVCpu->iem.s.uVexLength == 0)
1550 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1551 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1552 else
1553 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1554 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1555 IEM_MC_ADVANCE_RIP();
1556 IEM_MC_END();
1557 }
1558 else
1559 {
1560 /*
1561 * Register, memory.
1562 */
1563 if (pVCpu->iem.s.uVexLength == 0)
1564 {
1565 IEM_MC_BEGIN(0, 2);
1566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1567 IEM_MC_LOCAL(RTUINT128U, uSrc);
1568
1569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1570 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1571 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1572 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1573
1574 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1575 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1576
1577 IEM_MC_ADVANCE_RIP();
1578 IEM_MC_END();
1579 }
1580 else
1581 {
1582 IEM_MC_BEGIN(0, 2);
1583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1584 IEM_MC_LOCAL(RTUINT256U, uSrc);
1585
1586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1587 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1588 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1589 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1590
1591 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1592 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1593
1594 IEM_MC_ADVANCE_RIP();
1595 IEM_MC_END();
1596 }
1597 }
1598 return VINF_SUCCESS;
1599}
1600
1601/**
1602 * @opcode 0x29
1603 * @oppfx 66
1604 * @opcpuid avx
1605 * @opgroup og_avx_pcksclr_datamove
1606 * @opxcpttype 1
1607 * @optest op1=1 op2=2 -> op1=2
1608 * @optest op1=0 op2=-42 -> op1=-42
1609 * @note Almost identical to vmovaps
1610 */
1611FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1612{
1613 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1614 Assert(pVCpu->iem.s.uVexLength <= 1);
1615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1616 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1617 {
1618 /*
1619 * Register, register.
1620 */
1621 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1622 IEM_MC_BEGIN(1, 0);
1623
1624 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1625 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1626 if (pVCpu->iem.s.uVexLength == 0)
1627 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1628 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1629 else
1630 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1631 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1632 IEM_MC_ADVANCE_RIP();
1633 IEM_MC_END();
1634 }
1635 else
1636 {
1637 /*
1638 * Register, memory.
1639 */
1640 if (pVCpu->iem.s.uVexLength == 0)
1641 {
1642 IEM_MC_BEGIN(0, 2);
1643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1644 IEM_MC_LOCAL(RTUINT128U, uSrc);
1645
1646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1647 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1648 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1649 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1650
1651 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1652 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1653
1654 IEM_MC_ADVANCE_RIP();
1655 IEM_MC_END();
1656 }
1657 else
1658 {
1659 IEM_MC_BEGIN(0, 2);
1660 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1661 IEM_MC_LOCAL(RTUINT256U, uSrc);
1662
1663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1664 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1665 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1666 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1667
1668 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1669 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1670
1671 IEM_MC_ADVANCE_RIP();
1672 IEM_MC_END();
1673 }
1674 }
1675 return VINF_SUCCESS;
1676}
1677
1678
1679/**
1680 * @opmnemonic udvexf30f29
1681 * @opcode 0x29
1682 * @oppfx 0xf3
1683 * @opunused vex.modrm
1684 * @opcpuid avx
1685 * @optest ->
1686 * @opdone
1687 */
1688
1689/**
1690 * @opmnemonic udvexf20f29
1691 * @opcode 0x29
1692 * @oppfx 0xf2
1693 * @opunused vex.modrm
1694 * @opcpuid avx
1695 * @optest ->
1696 * @opdone
1697 */
1698
1699
1700/** Opcode VEX.0F 0x2a - invalid */
1701/** Opcode VEX.66.0F 0x2a - invalid */
1702/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
1703FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
1704/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1705FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
1706
1707
1708/**
1709 * @opcode 0x2b
1710 * @opcodesub !11 mr/reg
1711 * @oppfx none
1712 * @opcpuid avx
1713 * @opgroup og_avx_cachect
1714 * @opxcpttype 1
1715 * @optest op1=1 op2=2 -> op1=2
1716 * @optest op1=0 op2=-42 -> op1=-42
1717 * @note Identical implementation to vmovntpd
1718 */
1719FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1720{
1721 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1722 Assert(pVCpu->iem.s.uVexLength <= 1);
1723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1724 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1725 {
1726 /*
1727 * memory, register.
1728 */
1729 if (pVCpu->iem.s.uVexLength == 0)
1730 {
1731 IEM_MC_BEGIN(0, 2);
1732 IEM_MC_LOCAL(RTUINT128U, uSrc);
1733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1734
1735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1736 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1737 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1738 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1739
1740 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1741 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1742
1743 IEM_MC_ADVANCE_RIP();
1744 IEM_MC_END();
1745 }
1746 else
1747 {
1748 IEM_MC_BEGIN(0, 2);
1749 IEM_MC_LOCAL(RTUINT256U, uSrc);
1750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1751
1752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1753 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1754 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1755 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1756
1757 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1758 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1759
1760 IEM_MC_ADVANCE_RIP();
1761 IEM_MC_END();
1762 }
1763 }
1764 /* The register, register encoding is invalid. */
1765 else
1766 return IEMOP_RAISE_INVALID_OPCODE();
1767 return VINF_SUCCESS;
1768}
1769
1770/**
1771 * @opcode 0x2b
1772 * @opcodesub !11 mr/reg
1773 * @oppfx 0x66
1774 * @opcpuid avx
1775 * @opgroup og_avx_cachect
1776 * @opxcpttype 1
1777 * @optest op1=1 op2=2 -> op1=2
1778 * @optest op1=0 op2=-42 -> op1=-42
1779 * @note Identical implementation to vmovntps
1780 */
1781FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1782{
1783 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1784 Assert(pVCpu->iem.s.uVexLength <= 1);
1785 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1786 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1787 {
1788 /*
1789 * memory, register.
1790 */
1791 if (pVCpu->iem.s.uVexLength == 0)
1792 {
1793 IEM_MC_BEGIN(0, 2);
1794 IEM_MC_LOCAL(RTUINT128U, uSrc);
1795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1796
1797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1798 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1799 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1800 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1801
1802 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1803 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1804
1805 IEM_MC_ADVANCE_RIP();
1806 IEM_MC_END();
1807 }
1808 else
1809 {
1810 IEM_MC_BEGIN(0, 2);
1811 IEM_MC_LOCAL(RTUINT256U, uSrc);
1812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1813
1814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1815 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1816 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1817 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1818
1819 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1820 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1821
1822 IEM_MC_ADVANCE_RIP();
1823 IEM_MC_END();
1824 }
1825 }
1826 /* The register, register encoding is invalid. */
1827 else
1828 return IEMOP_RAISE_INVALID_OPCODE();
1829 return VINF_SUCCESS;
1830}
1831
1832/**
1833 * @opmnemonic udvexf30f2b
1834 * @opcode 0x2b
1835 * @oppfx 0xf3
1836 * @opunused vex.modrm
1837 * @opcpuid avx
1838 * @optest ->
1839 * @opdone
1840 */
1841
1842/**
1843 * @opmnemonic udvexf20f2b
1844 * @opcode 0x2b
1845 * @oppfx 0xf2
1846 * @opunused vex.modrm
1847 * @opcpuid avx
1848 * @optest ->
1849 * @opdone
1850 */
1851
1852
1853/* Opcode VEX.0F 0x2c - invalid */
1854/* Opcode VEX.66.0F 0x2c - invalid */
1855/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
1856FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1857/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
1858FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1859
1860/* Opcode VEX.0F 0x2d - invalid */
1861/* Opcode VEX.66.0F 0x2d - invalid */
1862/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
1863FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1864/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
1865FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1866
1867/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
1868FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss);
1869/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
1870FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd);
1871/* Opcode VEX.F3.0F 0x2e - invalid */
1872/* Opcode VEX.F2.0F 0x2e - invalid */
1873
1874/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
1875FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1876/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
1877FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1878/* Opcode VEX.F3.0F 0x2f - invalid */
1879/* Opcode VEX.F2.0F 0x2f - invalid */
1880
1881/* Opcode VEX.0F 0x30 - invalid */
1882/* Opcode VEX.0F 0x31 - invalid */
1883/* Opcode VEX.0F 0x32 - invalid */
1884/* Opcode VEX.0F 0x33 - invalid */
1885/* Opcode VEX.0F 0x34 - invalid */
1886/* Opcode VEX.0F 0x35 - invalid */
1887/* Opcode VEX.0F 0x36 - invalid */
1888/* Opcode VEX.0F 0x37 - invalid */
1889/* Opcode VEX.0F 0x38 - invalid */
1890/* Opcode VEX.0F 0x39 - invalid */
1891/* Opcode VEX.0F 0x3a - invalid */
1892/* Opcode VEX.0F 0x3b - invalid */
1893/* Opcode VEX.0F 0x3c - invalid */
1894/* Opcode VEX.0F 0x3d - invalid */
1895/* Opcode VEX.0F 0x3e - invalid */
1896/* Opcode VEX.0F 0x3f - invalid */
1897/* Opcode VEX.0F 0x40 - invalid */
1898/* Opcode VEX.0F 0x41 - invalid */
1899/* Opcode VEX.0F 0x42 - invalid */
1900/* Opcode VEX.0F 0x43 - invalid */
1901/* Opcode VEX.0F 0x44 - invalid */
1902/* Opcode VEX.0F 0x45 - invalid */
1903/* Opcode VEX.0F 0x46 - invalid */
1904/* Opcode VEX.0F 0x47 - invalid */
1905/* Opcode VEX.0F 0x48 - invalid */
1906/* Opcode VEX.0F 0x49 - invalid */
1907/* Opcode VEX.0F 0x4a - invalid */
1908/* Opcode VEX.0F 0x4b - invalid */
1909/* Opcode VEX.0F 0x4c - invalid */
1910/* Opcode VEX.0F 0x4d - invalid */
1911/* Opcode VEX.0F 0x4e - invalid */
1912/* Opcode VEX.0F 0x4f - invalid */
1913
1914/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
1915FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1916/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
1917FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1918/* Opcode VEX.F3.0F 0x50 - invalid */
1919/* Opcode VEX.F2.0F 0x50 - invalid */
1920
1921/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
1922FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1923/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
1924FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1925/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
1926FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1927/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1928FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1929
1930/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
1931FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1932/* Opcode VEX.66.0F 0x52 - invalid */
1933/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
1934FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1935/* Opcode VEX.F2.0F 0x52 - invalid */
1936
1937/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
1938FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1939/* Opcode VEX.66.0F 0x53 - invalid */
1940/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
1941FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1942/* Opcode VEX.F2.0F 0x53 - invalid */
1943
1944
1945/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
1946FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
1947{
1948 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1949 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1950 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
1951}
1952
1953
1954/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
1955FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
1956{
1957 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
1958 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1959 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
1960}
1961
1962
1963/* Opcode VEX.F3.0F 0x54 - invalid */
1964/* Opcode VEX.F2.0F 0x54 - invalid */
1965
1966
1967/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
1968FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
1969{
1970 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1971 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1972 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
1973}
1974
1975
1976/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
1977FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
1978{
1979 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
1980 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1981 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
1982}
1983
1984
1985/* Opcode VEX.F3.0F 0x55 - invalid */
1986/* Opcode VEX.F2.0F 0x55 - invalid */
1987
1988/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
1989FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
1990{
1991 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1992 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1993 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
1994}
1995
1996
1997/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
1998FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
1999{
2000 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2001 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2002 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2003}
2004
2005
2006/* Opcode VEX.F3.0F 0x56 - invalid */
2007/* Opcode VEX.F2.0F 0x56 - invalid */
2008
2009
2010/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2011FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2012{
2013 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2014 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2015 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2016}
2017
2018
2019/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2020FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2021{
2022 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2023 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2024 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2025}
2026
2027
2028/* Opcode VEX.F3.0F 0x57 - invalid */
2029/* Opcode VEX.F2.0F 0x57 - invalid */
2030
2031/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2032FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2033/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2034FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2035/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2036FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2037/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2038FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2039
2040/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2041FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2042/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2043FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2044/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2045FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2046/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2047FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2048
2049/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2050FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2051/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2052FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2053/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2054FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2055/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2056FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2057
2058/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2059FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2060/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2061FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2062/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2063FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2064/* Opcode VEX.F2.0F 0x5b - invalid */
2065
2066/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2067FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2068/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2069FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2070/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2071FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2072/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2073FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2074
2075/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2076FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2077/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2078FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2079/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2080FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2081/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2082FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2083
2084/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2085FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2086/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2087FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2088/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2089FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2090/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2091FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2092
2093/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2094FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2095/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2096FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2097/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2098FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2099/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2100FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2101
2102
2103/* Opcode VEX.0F 0x60 - invalid */
2104
2105
2106/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2107FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2108{
2109 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2110 IEMOPMEDIAOPTF3_INIT_VARS(iemAImpl_vpunpcklbw_u128, iemAImpl_vpunpcklbw_u256,
2111 iemAImpl_vpunpcklbw_u128_fallback, iemAImpl_vpunpcklbw_u256_fallback);
2112 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2113}
2114
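
/*
 * Editor's illustrative sketch (hypothetical helper, not referenced by the
 * emulation): reference semantics of the 128-bit byte interleave dispatched
 * above.  The *_LowSrc worker pairs the low eight bytes of both sources; the
 * *_HighSrc variants used by vpunpckhbw & friends further down take bytes
 * 8..15 instead.  The 256-bit forms repeat the operation independently in
 * each 128-bit lane.  Assumes the destination does not alias the sources.
 */
static void sketchPunpcklbwU128(uint8_t abDst[16], const uint8_t abSrc1[16], const uint8_t abSrc2[16])
{
    for (unsigned i = 0; i < 8; i++)
    {
        abDst[2 * i]     = abSrc1[i]; /* even result bytes come from the first (VEX.vvvv) source */
        abDst[2 * i + 1] = abSrc2[i]; /* odd result bytes come from the second (ModRM.rm) source  */
    }
}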
2115
2116/* Opcode VEX.F3.0F 0x60 - invalid */
2117
2118
2119/* Opcode VEX.0F 0x61 - invalid */
2120
2121
2122/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2123FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2124{
2125 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2126 IEMOPMEDIAOPTF3_INIT_VARS(iemAImpl_vpunpcklwd_u128, iemAImpl_vpunpcklwd_u256,
2127 iemAImpl_vpunpcklwd_u128_fallback, iemAImpl_vpunpcklwd_u256_fallback);
2128 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2129}
2130
2131
2132/* Opcode VEX.F3.0F 0x61 - invalid */
2133
2134
2135/* Opcode VEX.0F 0x62 - invalid */
2136
2137/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2138FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2139{
2140 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2141 IEMOPMEDIAOPTF3_INIT_VARS(iemAImpl_vpunpckldq_u128, iemAImpl_vpunpckldq_u256,
2142 iemAImpl_vpunpckldq_u128_fallback, iemAImpl_vpunpckldq_u256_fallback);
2143 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2144}
2145
2146
2147/* Opcode VEX.F3.0F 0x62 - invalid */
2148
2149
2150
2151/* Opcode VEX.0F 0x63 - invalid */
2152/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2153FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2154/* Opcode VEX.F3.0F 0x63 - invalid */
2155
2156/* Opcode VEX.0F 0x64 - invalid */
2157
2158
2159/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2160FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2161{
2162 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2163 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2164 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpcmpgtb, &g_iemAImpl_vpcmpgtb_fallback));
2165}
2166
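
/*
 * Editor's illustrative sketch (hypothetical helper, not referenced by the
 * emulation): what the vpcmpgtb worker selected above computes for one
 * 128-bit lane - a signed, byte-wise greater-than compare producing an
 * all-ones (0xff) or all-zero mask per element.  vpcmpgtw/vpcmpgtd below do
 * the same on 16-/32-bit elements, and vpcmpeqb/w/d (0x74..0x76) test for
 * equality instead.  Assumes the destination does not alias the sources.
 */
static void sketchPcmpgtbU128(uint8_t abDst[16], const int8_t abSrc1[16], const int8_t abSrc2[16])
{
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = abSrc1[i] > abSrc2[i] ? 0xff : 0x00; /* per-byte signed compare mask */
}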
2167
2168/* Opcode VEX.F3.0F 0x64 - invalid */
2169
2170/* Opcode VEX.0F 0x65 - invalid */
2171
2172
2173/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2174FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2175{
2176 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2177 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2178 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpcmpgtw, &g_iemAImpl_vpcmpgtw_fallback));
2179}
2180
2181
2182/* Opcode VEX.F3.0F 0x65 - invalid */
2183
2184/* Opcode VEX.0F 0x66 - invalid */
2185
2186
2187/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
2188FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
2189{
2190 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2191 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2192 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpcmpgtd, &g_iemAImpl_vpcmpgtd_fallback));
2193}
2194
2195
2196/* Opcode VEX.F3.0F 0x66 - invalid */
2197
2198/* Opcode VEX.0F 0x67 - invalid */
2199/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, W */
2200FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2201/* Opcode VEX.F3.0F 0x67 - invalid */
2202
2203
2204///**
2205// * Common worker for SSE2 instructions on the form:
2206// * pxxxx xmm1, xmm2/mem128
2207// *
2208// * The 2nd operand is the second half of a register, which in the memory case
2209// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2210// * where it may read the full 128 bits or only the upper 64 bits.
2211// *
2212// * Exceptions type 4.
2213// */
2214//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2215//{
2216// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2217// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2218// {
2219// /*
2220// * Register, register.
2221// */
2222// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2223// IEM_MC_BEGIN(2, 0);
2224// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2225// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2226// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2227// IEM_MC_PREPARE_SSE_USAGE();
2228// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2229// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2230// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2231// IEM_MC_ADVANCE_RIP();
2232// IEM_MC_END();
2233// }
2234// else
2235// {
2236// /*
2237// * Register, memory.
2238// */
2239// IEM_MC_BEGIN(2, 2);
2240// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2241// IEM_MC_LOCAL(RTUINT128U, uSrc);
2242// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2243// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2244//
2245// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2246// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2247// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2248// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
2249//
2250// IEM_MC_PREPARE_SSE_USAGE();
2251// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2252// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2253//
2254// IEM_MC_ADVANCE_RIP();
2255// IEM_MC_END();
2256// }
2257// return VINF_SUCCESS;
2258//}
2259
2260
2261/* Opcode VEX.0F 0x68 - invalid */
2262
2263/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
2264FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2265{
2266 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2267 IEMOPMEDIAOPTF3_INIT_VARS(iemAImpl_vpunpckhbw_u128, iemAImpl_vpunpckhbw_u256,
2268 iemAImpl_vpunpckhbw_u128_fallback, iemAImpl_vpunpckhbw_u256_fallback);
2269 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2270}
2271
2272
2273/* Opcode VEX.F3.0F 0x68 - invalid */
2274
2275
2276/* Opcode VEX.0F 0x69 - invalid */
2277
2278
2279/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
2280FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2281{
2282 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2283 IEMOPMEDIAOPTF3_INIT_VARS(iemAImpl_vpunpckhwd_u128, iemAImpl_vpunpckhwd_u256,
2284 iemAImpl_vpunpckhwd_u128_fallback, iemAImpl_vpunpckhwd_u256_fallback);
2285 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2286}
2287
2288
2289/* Opcode VEX.F3.0F 0x69 - invalid */
2290
2291
2292/* Opcode VEX.0F 0x6a - invalid */
2293
2294
2295/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, W */
2296FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2297{
2298 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2299 IEMOPMEDIAOPTF3_INIT_VARS(iemAImpl_vpunpckhdq_u128, iemAImpl_vpunpckhdq_u256,
2300 iemAImpl_vpunpckhdq_u128_fallback, iemAImpl_vpunpckhdq_u256_fallback);
2301 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2302}
2303
2304
2305/* Opcode VEX.F3.0F 0x6a - invalid */
2306
2307
2308/* Opcode VEX.0F 0x6b - invalid */
2309/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
2310FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2311/* Opcode VEX.F3.0F 0x6b - invalid */
2312
2313
2314/* Opcode VEX.0F 0x6c - invalid */
2315
2316
2317/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
2318FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2319{
2320 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2321 IEMOPMEDIAOPTF3_INIT_VARS(iemAImpl_vpunpcklqdq_u128, iemAImpl_vpunpcklqdq_u256,
2322 iemAImpl_vpunpcklqdq_u128_fallback, iemAImpl_vpunpcklqdq_u256_fallback);
2323 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2324}
2325
2326
2327/* Opcode VEX.F3.0F 0x6c - invalid */
2328/* Opcode VEX.F2.0F 0x6c - invalid */
2329
2330
2331/* Opcode VEX.0F 0x6d - invalid */
2332
2333
2334/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, W */
2335FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2336{
2337 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2338 IEMOPMEDIAOPTF3_INIT_VARS(iemAImpl_vpunpckhqdq_u128, iemAImpl_vpunpckhqdq_u256,
2339 iemAImpl_vpunpckhqdq_u128_fallback, iemAImpl_vpunpckhqdq_u256_fallback);
2340 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2341}
2342
2343
2344/* Opcode VEX.F3.0F 0x6d - invalid */
2345
2346
2347/* Opcode VEX.0F 0x6e - invalid */
2348
2349FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2350{
2351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2352 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2353 {
2354 /**
2355 * @opcode 0x6e
2356 * @opcodesub rex.w=1
2357 * @oppfx 0x66
2358 * @opcpuid avx
2359 * @opgroup og_avx_simdint_datamov
2360 * @opxcpttype 5
2361 * @optest 64-bit / op1=1 op2=2 -> op1=2
2362 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
2363 */
2364 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2365 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2366 {
2367 /* XMM, greg64 */
2368 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2369 IEM_MC_BEGIN(0, 1);
2370 IEM_MC_LOCAL(uint64_t, u64Tmp);
2371
2372 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2373 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2374
2375 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2376 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2377
2378 IEM_MC_ADVANCE_RIP();
2379 IEM_MC_END();
2380 }
2381 else
2382 {
2383 /* XMM, [mem64] */
2384 IEM_MC_BEGIN(0, 2);
2385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2386 IEM_MC_LOCAL(uint64_t, u64Tmp);
2387
2388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2389 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2390 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2391 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2392
2393 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2394 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2395
2396 IEM_MC_ADVANCE_RIP();
2397 IEM_MC_END();
2398 }
2399 }
2400 else
2401 {
2402 /**
2403 * @opdone
2404 * @opcode 0x6e
2405 * @opcodesub rex.w=0
2406 * @oppfx 0x66
2407 * @opcpuid avx
2408 * @opgroup og_avx_simdint_datamov
2409 * @opxcpttype 5
2410 * @opfunction iemOp_vmovd_q_Vy_Ey
2411 * @optest op1=1 op2=2 -> op1=2
2412 * @optest op1=0 op2=-42 -> op1=-42
2413 */
2414 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2415 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2416 {
2417 /* XMM, greg32 */
2418 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2419 IEM_MC_BEGIN(0, 1);
2420 IEM_MC_LOCAL(uint32_t, u32Tmp);
2421
2422 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2423 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2424
2425 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2426 IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2427
2428 IEM_MC_ADVANCE_RIP();
2429 IEM_MC_END();
2430 }
2431 else
2432 {
2433 /* XMM, [mem32] */
2434 IEM_MC_BEGIN(0, 2);
2435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2436 IEM_MC_LOCAL(uint32_t, u32Tmp);
2437
2438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2439 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2440 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2441 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2442
2443 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2444 IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2445
2446 IEM_MC_ADVANCE_RIP();
2447 IEM_MC_END();
2448 }
2449 }
2450 return VINF_SUCCESS;
2451}
2452
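
/*
 * Editor's illustrative sketch (hypothetical helper): the net effect of the
 * IEM_MC_STORE_YREG_U64_ZX_VLMAX / IEM_MC_STORE_YREG_U32_ZX_VLMAX statements
 * used by vmovq/vmovd above.  The scalar lands in the low bits of the
 * destination register and everything above it, up to the maximum vector
 * length, is zeroed - the usual VEX rule that a write to an XMM register
 * clears the high YMM bits.
 */
static void sketchStoreYregU64ZxVlmax(uint64_t au64YmmDst[4], uint64_t u64Value)
{
    au64YmmDst[0] = u64Value; /* low qword receives the GPR or memory value */
    au64YmmDst[1] = 0;        /* high qword of the XMM part is zeroed */
    au64YmmDst[2] = 0;        /* and so are the upper 128 bits of the YMM register */
    au64YmmDst[3] = 0;
}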
2453
2454/* Opcode VEX.F3.0F 0x6e - invalid */
2455
2456
2457/* Opcode VEX.0F 0x6f - invalid */
2458
2459/**
2460 * @opcode 0x6f
2461 * @oppfx 0x66
2462 * @opcpuid avx
2463 * @opgroup og_avx_simdint_datamove
2464 * @opxcpttype 1
2465 * @optest op1=1 op2=2 -> op1=2
2466 * @optest op1=0 op2=-42 -> op1=-42
2467 */
2468FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2469{
2470 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2471 Assert(pVCpu->iem.s.uVexLength <= 1);
2472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2473 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2474 {
2475 /*
2476 * Register, register.
2477 */
2478 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2479 IEM_MC_BEGIN(0, 0);
2480
2481 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2482 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2483 if (pVCpu->iem.s.uVexLength == 0)
2484 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2485 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2486 else
2487 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2488 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2489 IEM_MC_ADVANCE_RIP();
2490 IEM_MC_END();
2491 }
2492 else if (pVCpu->iem.s.uVexLength == 0)
2493 {
2494 /*
2495 * Register, memory128.
2496 */
2497 IEM_MC_BEGIN(0, 2);
2498 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2500
2501 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2502 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2503 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2504 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2505
2506 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2507 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2508
2509 IEM_MC_ADVANCE_RIP();
2510 IEM_MC_END();
2511 }
2512 else
2513 {
2514 /*
2515 * Register, memory256.
2516 */
2517 IEM_MC_BEGIN(0, 2);
2518 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
2519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2520
2521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2522 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2523 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2524 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2525
2526 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2527 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u256Tmp);
2528
2529 IEM_MC_ADVANCE_RIP();
2530 IEM_MC_END();
2531 }
2532 return VINF_SUCCESS;
2533}
2534
2535/**
2536 * @opcode 0x6f
2537 * @oppfx 0xf3
2538 * @opcpuid avx
2539 * @opgroup og_avx_simdint_datamove
2540 * @opxcpttype 4UA
2541 * @optest op1=1 op2=2 -> op1=2
2542 * @optest op1=0 op2=-42 -> op1=-42
2543 */
2544FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2545{
2546 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2547 Assert(pVCpu->iem.s.uVexLength <= 1);
2548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2550 {
2551 /*
2552 * Register, register.
2553 */
2554 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2555 IEM_MC_BEGIN(0, 0);
2556
2557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2558 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2559 if (pVCpu->iem.s.uVexLength == 0)
2560 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2561 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2562 else
2563 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2564 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2565 IEM_MC_ADVANCE_RIP();
2566 IEM_MC_END();
2567 }
2568 else if (pVCpu->iem.s.uVexLength == 0)
2569 {
2570 /*
2571 * Register, memory128.
2572 */
2573 IEM_MC_BEGIN(0, 2);
2574 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2576
2577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2578 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2579 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2580 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2581
2582 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2583 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2584
2585 IEM_MC_ADVANCE_RIP();
2586 IEM_MC_END();
2587 }
2588 else
2589 {
2590 /*
2591 * Register, memory256.
2592 */
2593 IEM_MC_BEGIN(0, 2);
2594 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
2595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2596
2597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2598 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2599 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2600 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2601
2602 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2603 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u256Tmp);
2604
2605 IEM_MC_ADVANCE_RIP();
2606 IEM_MC_END();
2607 }
2608 return VINF_SUCCESS;
2609}
2610
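
/*
 * Editor's illustrative sketch (hypothetical, simplified): the guest visible
 * difference between the vmovdqa and vmovdqu loads above lies in the
 * alignment requirement enforced by the *_ALIGN_SSE / *_ALIGN_AVX fetches.
 * vmovdqa requires a 16-byte (VEX.L=0) or 32-byte (VEX.L=1) aligned address
 * and raises #GP(0) otherwise; vmovdqu accepts any address.
 */
static bool sketchVmovdqaIsAligned(uint64_t GCPtrEff, unsigned cbVecWidth /* 16 or 32 */)
{
    return (GCPtrEff & (cbVecWidth - 1)) == 0; /* false would mean #GP(0) for the aligned form */
}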
2611
2612/* Opcode VEX.0F 0x70 - invalid */
2613
2614
2615/**
2616 * Common worker for AVX/AVX2 instructions on the forms:
2617 * - vpxxx xmm0, xmm2/mem128, imm8
2618 * - vpxxx ymm0, ymm2/mem256, imm8
2619 *
2620 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
2621 */
2622FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
2623{
2624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2625 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2626 {
2627 /*
2628 * Register, register.
2629 */
2630 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2631 if (pVCpu->iem.s.uVexLength)
2632 {
2633 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
2634 IEM_MC_BEGIN(3, 2);
2635 IEM_MC_LOCAL(RTUINT256U, uDst);
2636 IEM_MC_LOCAL(RTUINT256U, uSrc);
2637 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
2638 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2639 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2640 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2641 IEM_MC_PREPARE_AVX_USAGE();
2642 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2643 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
2644 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2645 IEM_MC_ADVANCE_RIP();
2646 IEM_MC_END();
2647 }
2648 else
2649 {
2650 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2651 IEM_MC_BEGIN(3, 0);
2652 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2653 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2654 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2655 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2656 IEM_MC_PREPARE_AVX_USAGE();
2657 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2658 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2659 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
2660 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2661 IEM_MC_ADVANCE_RIP();
2662 IEM_MC_END();
2663 }
2664 }
2665 else
2666 {
2667 /*
2668 * Register, memory.
2669 */
2670 if (pVCpu->iem.s.uVexLength)
2671 {
2672 IEM_MC_BEGIN(3, 3);
2673 IEM_MC_LOCAL(RTUINT256U, uDst);
2674 IEM_MC_LOCAL(RTUINT256U, uSrc);
2675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2676 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
2677 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2678
2679 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2680 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2681 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
2682 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2683 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2684 IEM_MC_PREPARE_AVX_USAGE();
2685
2686 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2687 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
2688 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2689
2690 IEM_MC_ADVANCE_RIP();
2691 IEM_MC_END();
2692 }
2693 else
2694 {
2695 IEM_MC_BEGIN(3, 1);
2696 IEM_MC_LOCAL(RTUINT128U, uSrc);
2697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2698 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2699 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2700
2701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2702 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2703 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2704 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2705 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2706 IEM_MC_PREPARE_AVX_USAGE();
2707
2708 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2709 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2710 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
2711 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2712
2713 IEM_MC_ADVANCE_RIP();
2714 IEM_MC_END();
2715 }
2716 }
2717 return VINF_SUCCESS;
2718}
2719
2720
2721/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
2722FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2723{
2724 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2725 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
2726 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
2727
2728}
2729
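
/*
 * Editor's illustrative sketch (hypothetical helper): reference semantics of
 * the 128-bit pshufd operation handed to the common worker above.  Each
 * 2-bit field of the immediate selects which source dword supplies the
 * corresponding destination dword; the 256-bit VEX form applies the same
 * selection to each 128-bit lane separately.  vpshufhw/vpshuflw below work
 * the same way but only shuffle the high/low four words of each lane and
 * copy the other half unchanged.  Assumes no aliasing of source and
 * destination.
 */
static void sketchPshufdU128(uint32_t au32Dst[4], const uint32_t au32Src[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        au32Dst[i] = au32Src[(bImm >> (i * 2)) & 3]; /* bits 2i+1:2i pick the source element */
}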
2730
2731/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
2732FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2733{
2734 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2735 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
2736 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
2737
2738}
2739
2740
2741/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
2742FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2743{
2744 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2745 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
2746 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
2747}
2748
2749
2750/* Opcode VEX.0F 0x71 11/2 - invalid. */
2751/** Opcode VEX.66.0F 0x71 11/2. */
2752FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2753
2754/* Opcode VEX.0F 0x71 11/4 - invalid */
2755/** Opcode VEX.66.0F 0x71 11/4. */
2756FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2757
2758/* Opcode VEX.0F 0x71 11/6 - invalid */
2759/** Opcode VEX.66.0F 0x71 11/6. */
2760FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2761
2762
2763/**
2764 * VEX Group 12 jump table for register variant.
2765 */
2766IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
2767{
2768 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2769 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2770 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2771 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2772 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2773 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2774 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2775 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2776};
2777AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
2778
2779
2780/** Opcode VEX.0F 0x71. */
2781FNIEMOP_DEF(iemOp_VGrp12)
2782{
2783 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2784 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2785 /* register, register */
2786 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2787 + pVCpu->iem.s.idxPrefix], bRm);
2788 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2789}
2790
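
/*
 * Editor's illustrative sketch (hypothetical helper): how the VEX group
 * 12/13/14 dispatchers index their 8 * 4 entry jump tables.  Each ModR/M reg
 * value (/0../7) owns four consecutive entries, one per SIMD prefix column
 * (none, 0x66, 0xF3, 0xF2), matching the layout of g_apfnVexGroup12RegReg
 * above.  E.g. VEX.66.0F 71 /2 yields iReg=2 and the 0x66 column (assumed to
 * be idxPrefix=1), i.e. entry 9 = iemOp_VGrp12_vpsrlw_Hx_Ux_Ib.
 */
static unsigned sketchVexGroupTableIndex(uint8_t bRm, unsigned idxPrefix)
{
    unsigned const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* /0../7 */
    return iReg * 4 + idxPrefix; /* four prefix columns per reg value */
}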
2791
2792/* Opcode VEX.0F 0x72 11/2 - invalid. */
2793/** Opcode VEX.66.0F 0x72 11/2. */
2794FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
2795
2796/* Opcode VEX.0F 0x72 11/4 - invalid. */
2797/** Opcode VEX.66.0F 0x72 11/4. */
2798FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
2799
2800/* Opcode VEX.0F 0x72 11/6 - invalid. */
2801/** Opcode VEX.66.0F 0x72 11/6. */
2802FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
2803
2804
2805/**
2806 * Group 13 jump table for register variant.
2807 */
2808IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
2809{
2810 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2811 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2812 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2813 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2814 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2815 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2816 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2817 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2818};
2819AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
2820
2821/** Opcode VEX.0F 0x72. */
2822FNIEMOP_DEF(iemOp_VGrp13)
2823{
2824 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2825 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2826 /* register, register */
2827 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2828 + pVCpu->iem.s.idxPrefix], bRm);
2829 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2830}
2831
2832
2833/* Opcode VEX.0F 0x73 11/2 - invalid. */
2834/** Opcode VEX.66.0F 0x73 11/2. */
2835FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
2836
2837/** Opcode VEX.66.0F 0x73 11/3. */
2838FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
2839
2840/* Opcode VEX.0F 0x73 11/6 - invalid. */
2841/** Opcode VEX.66.0F 0x73 11/6. */
2842FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
2843
2844/** Opcode VEX.66.0F 0x73 11/7. */
2845FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
2846
2847/**
2848 * Group 14 jump table for register variant.
2849 */
2850IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
2851{
2852 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2853 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2854 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2855 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2856 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2857 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2858 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2859 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2860};
2861AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
2862
2863
2864/** Opcode VEX.0F 0x73. */
2865FNIEMOP_DEF(iemOp_VGrp14)
2866{
2867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2868 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2869 /* register, register */
2870 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2871 + pVCpu->iem.s.idxPrefix], bRm);
2872 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2873}
2874
2875
2876/* Opcode VEX.0F 0x74 - invalid */
2877
2878
2879/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
2880FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
2881{
2882 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2883 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2884 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpcmpeqb, &g_iemAImpl_vpcmpeqb_fallback));
2885}
2886
2887/* Opcode VEX.F3.0F 0x74 - invalid */
2888/* Opcode VEX.F2.0F 0x74 - invalid */
2889
2890
2891/* Opcode VEX.0F 0x75 - invalid */
2892
2893
2894/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
2895FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
2896{
2897 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2898 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2899 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpcmpeqw, &g_iemAImpl_vpcmpeqw_fallback));
2900}
2901
2902
2903/* Opcode VEX.F3.0F 0x75 - invalid */
2904/* Opcode VEX.F2.0F 0x75 - invalid */
2905
2906
2907/* Opcode VEX.0F 0x76 - invalid */
2908
2909
2910/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
2911FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
2912{
2913 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2914 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2915 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpcmpeqd, &g_iemAImpl_vpcmpeqd_fallback));
2916}
2917
2918
2919/* Opcode VEX.F3.0F 0x76 - invalid */
2920/* Opcode VEX.F2.0F 0x76 - invalid */
2921
2922
2923/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
2924FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
2925/* Opcode VEX.66.0F 0x77 - invalid */
2926/* Opcode VEX.F3.0F 0x77 - invalid */
2927/* Opcode VEX.F2.0F 0x77 - invalid */
2928
2929/* Opcode VEX.0F 0x78 - invalid */
2930/* Opcode VEX.66.0F 0x78 - invalid */
2931/* Opcode VEX.F3.0F 0x78 - invalid */
2932/* Opcode VEX.F2.0F 0x78 - invalid */
2933
2934/* Opcode VEX.0F 0x79 - invalid */
2935/* Opcode VEX.66.0F 0x79 - invalid */
2936/* Opcode VEX.F3.0F 0x79 - invalid */
2937/* Opcode VEX.F2.0F 0x79 - invalid */
2938
2939/* Opcode VEX.0F 0x7a - invalid */
2940/* Opcode VEX.66.0F 0x7a - invalid */
2941/* Opcode VEX.F3.0F 0x7a - invalid */
2942/* Opcode VEX.F2.0F 0x7a - invalid */
2943
2944/* Opcode VEX.0F 0x7b - invalid */
2945/* Opcode VEX.66.0F 0x7b - invalid */
2946/* Opcode VEX.F3.0F 0x7b - invalid */
2947/* Opcode VEX.F2.0F 0x7b - invalid */
2948
2949/* Opcode VEX.0F 0x7c - invalid */
2950/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
2951FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
2952/* Opcode VEX.F3.0F 0x7c - invalid */
2953/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
2954FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
2955
2956/* Opcode VEX.0F 0x7d - invalid */
2957/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
2958FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
2959/* Opcode VEX.F3.0F 0x7d - invalid */
2960/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
2961FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
2962
2963
2964/* Opcode VEX.0F 0x7e - invalid */
2965
2966FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
2967{
2968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2969 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2970 {
2971 /**
2972 * @opcode 0x7e
2973 * @opcodesub rex.w=1
2974 * @oppfx 0x66
2975 * @opcpuid avx
2976 * @opgroup og_avx_simdint_datamov
2977 * @opxcpttype 5
2978 * @optest 64-bit / op1=1 op2=2 -> op1=2
2979 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
2980 */
2981 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2982 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2983 {
2984 /* greg64, XMM */
2985 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2986 IEM_MC_BEGIN(0, 1);
2987 IEM_MC_LOCAL(uint64_t, u64Tmp);
2988
2989 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2990 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2991
2992 IEM_MC_FETCH_YREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2993 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
2994
2995 IEM_MC_ADVANCE_RIP();
2996 IEM_MC_END();
2997 }
2998 else
2999 {
3000 /* [mem64], XMM */
3001 IEM_MC_BEGIN(0, 2);
3002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3003 IEM_MC_LOCAL(uint64_t, u64Tmp);
3004
3005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3006 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3007 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3008 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3009
3010 IEM_MC_FETCH_YREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3011 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3012
3013 IEM_MC_ADVANCE_RIP();
3014 IEM_MC_END();
3015 }
3016 }
3017 else
3018 {
3019 /**
3020 * @opdone
3021 * @opcode 0x7e
3022 * @opcodesub rex.w=0
3023 * @oppfx 0x66
3024 * @opcpuid avx
3025 * @opgroup og_avx_simdint_datamov
3026 * @opxcpttype 5
3027 * @opfunction iemOp_vmovd_q_Ey_Vy
3028 * @optest op1=1 op2=2 -> op1=2
3029 * @optest op1=0 op2=-42 -> op1=-42
3030 */
3031 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3032 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3033 {
3034 /* greg32, XMM */
3035 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3036 IEM_MC_BEGIN(0, 1);
3037 IEM_MC_LOCAL(uint32_t, u32Tmp);
3038
3039 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3040 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3041
3042 IEM_MC_FETCH_YREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3043 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3044
3045 IEM_MC_ADVANCE_RIP();
3046 IEM_MC_END();
3047 }
3048 else
3049 {
3050 /* [mem32], XMM */
3051 IEM_MC_BEGIN(0, 2);
3052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3053 IEM_MC_LOCAL(uint32_t, u32Tmp);
3054
3055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3056 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3057 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3058 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3059
3060 IEM_MC_FETCH_YREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3061 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3062
3063 IEM_MC_ADVANCE_RIP();
3064 IEM_MC_END();
3065 }
3066 }
3067 return VINF_SUCCESS;
3068}
3069
3070/**
3071 * @opcode 0x7e
3072 * @oppfx 0xf3
3073 * @opcpuid avx
3074 * @opgroup og_avx_pcksclr_datamove
3075 * @opxcpttype none
3076 * @optest op1=1 op2=2 -> op1=2
3077 * @optest op1=0 op2=-42 -> op1=-42
3078 */
3079FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
3080{
3081 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3082 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3083 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3084 {
3085 /*
3086 * Register, register.
3087 */
3088 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3089 IEM_MC_BEGIN(0, 0);
3090
3091 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3092 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3093
3094 IEM_MC_COPY_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
3095 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3096 IEM_MC_ADVANCE_RIP();
3097 IEM_MC_END();
3098 }
3099 else
3100 {
3101 /*
3102 * Memory, register.
3103 */
3104 IEM_MC_BEGIN(0, 2);
3105 IEM_MC_LOCAL(uint64_t, uSrc);
3106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3107
3108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3109 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3110 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3111 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3112
3113 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3114 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
3115
3116 IEM_MC_ADVANCE_RIP();
3117 IEM_MC_END();
3118 }
3119 return VINF_SUCCESS;
3120
3121}
3122/* Opcode VEX.F2.0F 0x7e - invalid */
3123
3124
3125/* Opcode VEX.0F 0x7f - invalid */
3126
3127/**
3128 * @opcode 0x7f
3129 * @oppfx 0x66
3130 * @opcpuid avx
3131 * @opgroup og_avx_simdint_datamove
3132 * @opxcpttype 1
3133 * @optest op1=1 op2=2 -> op1=2
3134 * @optest op1=0 op2=-42 -> op1=-42
3135 */
3136FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3137{
3138 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3139 Assert(pVCpu->iem.s.uVexLength <= 1);
3140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3141 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3142 {
3143 /*
3144 * Register, register.
3145 */
3146 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3147 IEM_MC_BEGIN(0, 0);
3148
3149 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3150 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3151 if (pVCpu->iem.s.uVexLength == 0)
3152 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3153 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3154 else
3155 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3156 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3157 IEM_MC_ADVANCE_RIP();
3158 IEM_MC_END();
3159 }
3160 else if (pVCpu->iem.s.uVexLength == 0)
3161 {
3162 /*
3163 * Register, memory128.
3164 */
3165 IEM_MC_BEGIN(0, 2);
3166 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3168
3169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3170 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3171 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3172 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3173
3174 IEM_MC_FETCH_YREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3175 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3176
3177 IEM_MC_ADVANCE_RIP();
3178 IEM_MC_END();
3179 }
3180 else
3181 {
3182 /*
3183 * Register, memory256.
3184 */
3185 IEM_MC_BEGIN(0, 2);
3186 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3188
3189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3193
3194 IEM_MC_FETCH_YREG_U256(u256Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3195 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3196
3197 IEM_MC_ADVANCE_RIP();
3198 IEM_MC_END();
3199 }
3200 return VINF_SUCCESS;
3201}
3202
3203/**
3204 * @opcode 0x7f
3205 * @oppfx 0xf3
3206 * @opcpuid avx
3207 * @opgroup og_avx_simdint_datamove
3208 * @opxcpttype 4UA
3209 * @optest op1=1 op2=2 -> op1=2
3210 * @optest op1=0 op2=-42 -> op1=-42
3211 */
3212FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3213{
3214 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3215 Assert(pVCpu->iem.s.uVexLength <= 1);
3216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3217 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3218 {
3219 /*
3220 * Register, register.
3221 */
3222 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3223 IEM_MC_BEGIN(0, 0);
3224
3225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3226 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3227 if (pVCpu->iem.s.uVexLength == 0)
3228 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3229 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3230 else
3231 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3232 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3233 IEM_MC_ADVANCE_RIP();
3234 IEM_MC_END();
3235 }
3236 else if (pVCpu->iem.s.uVexLength == 0)
3237 {
3238 /*
3239 * Register, memory128.
3240 */
3241 IEM_MC_BEGIN(0, 2);
3242 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3244
3245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3246 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3247 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3248 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3249
3250 IEM_MC_FETCH_YREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3251 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3252
3253 IEM_MC_ADVANCE_RIP();
3254 IEM_MC_END();
3255 }
3256 else
3257 {
3258 /*
3259 * Register, memory256.
3260 */
3261 IEM_MC_BEGIN(0, 2);
3262 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3264
3265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3266 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3267 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3268 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3269
3270 IEM_MC_FETCH_YREG_U256(u256Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3271 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3272
3273 IEM_MC_ADVANCE_RIP();
3274 IEM_MC_END();
3275 }
3276 return VINF_SUCCESS;
3277}
3278
3279/* Opcode VEX.F2.0F 0x7f - invalid */
3280
3281
3282/* Opcode VEX.0F 0x80 - invalid */
3283/* Opcode VEX.0F 0x81 - invalid */
3284/* Opcode VEX.0F 0x82 - invalid */
3285/* Opcode VEX.0F 0x83 - invalid */
3286/* Opcode VEX.0F 0x84 - invalid */
3287/* Opcode VEX.0F 0x85 - invalid */
3288/* Opcode VEX.0F 0x86 - invalid */
3289/* Opcode VEX.0F 0x87 - invalid */
3290/* Opcode VEX.0F 0x88 - invalid */
3291/* Opcode VEX.0F 0x89 - invalid */
3292/* Opcode VEX.0F 0x8a - invalid */
3293/* Opcode VEX.0F 0x8b - invalid */
3294/* Opcode VEX.0F 0x8c - invalid */
3295/* Opcode VEX.0F 0x8d - invalid */
3296/* Opcode VEX.0F 0x8e - invalid */
3297/* Opcode VEX.0F 0x8f - invalid */
3298/* Opcode VEX.0F 0x90 - invalid */
3299/* Opcode VEX.0F 0x91 - invalid */
3300/* Opcode VEX.0F 0x92 - invalid */
3301/* Opcode VEX.0F 0x93 - invalid */
3302/* Opcode VEX.0F 0x94 - invalid */
3303/* Opcode VEX.0F 0x95 - invalid */
3304/* Opcode VEX.0F 0x96 - invalid */
3305/* Opcode VEX.0F 0x97 - invalid */
3306/* Opcode VEX.0F 0x98 - invalid */
3307/* Opcode VEX.0F 0x99 - invalid */
3308/* Opcode VEX.0F 0x9a - invalid */
3309/* Opcode VEX.0F 0x9b - invalid */
3310/* Opcode VEX.0F 0x9c - invalid */
3311/* Opcode VEX.0F 0x9d - invalid */
3312/* Opcode VEX.0F 0x9e - invalid */
3313/* Opcode VEX.0F 0x9f - invalid */
3314/* Opcode VEX.0F 0xa0 - invalid */
3315/* Opcode VEX.0F 0xa1 - invalid */
3316/* Opcode VEX.0F 0xa2 - invalid */
3317/* Opcode VEX.0F 0xa3 - invalid */
3318/* Opcode VEX.0F 0xa4 - invalid */
3319/* Opcode VEX.0F 0xa5 - invalid */
3320/* Opcode VEX.0F 0xa6 - invalid */
3321/* Opcode VEX.0F 0xa7 - invalid */
3322/* Opcode VEX.0F 0xa8 - invalid */
3323/* Opcode VEX.0F 0xa9 - invalid */
3324/* Opcode VEX.0F 0xaa - invalid */
3325/* Opcode VEX.0F 0xab - invalid */
3326/* Opcode VEX.0F 0xac - invalid */
3327/* Opcode VEX.0F 0xad - invalid */
3328
3329
3330/* Opcode VEX.0F 0xae mem/0 - invalid. */
3331/* Opcode VEX.0F 0xae mem/1 - invalid. */
3332
3333/**
3334 * @ opmaps grp15
3335 * @ opcode !11/2
3336 * @ oppfx none
3337 * @ opcpuid sse
3338 * @ opgroup og_sse_mxcsrsm
3339 * @ opxcpttype 5
3340 * @ optest op1=0 -> mxcsr=0
3341 * @ optest op1=0x2083 -> mxcsr=0x2083
3342 * @ optest op1=0xfffffffe -> value.xcpt=0xd
3343 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
3344 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
3345 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
3346 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
3347 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
3348 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3349 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3350 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3351 */
3352FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
3353//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
3354//{
3355// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3356// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
3357// return IEMOP_RAISE_INVALID_OPCODE();
3358//
3359// IEM_MC_BEGIN(2, 0);
3360// IEM_MC_ARG(uint8_t, iEffSeg, 0);
3361// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3362// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3363// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3364// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3365// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3366// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
3367// IEM_MC_END();
3368// return VINF_SUCCESS;
3369//}
3370
3371
3372/**
3373 * @opmaps vexgrp15
3374 * @opcode !11/3
3375 * @oppfx none
3376 * @opcpuid avx
3377 * @opgroup og_avx_mxcsrsm
3378 * @opxcpttype 5
3379 * @optest mxcsr=0 -> op1=0
3380 * @optest mxcsr=0x2083 -> op1=0x2083
3381 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
3382 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
3383 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
3384 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
3385 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
3386 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
3387 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
3388 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
3389 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
3390 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
3391 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3392 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
3393 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3394 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
3395 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3396 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
3397 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
3398 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
3399 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
3400 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
3401 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
3402 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
3403 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
3404 * -> value.xcpt=0x6
3405 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
3406 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
3407 * APMv4 rev 3.17 page 509.
3408 * @todo Test this instruction on AMD Ryzen.
3409 */
3410FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
3411{
3412 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
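    /* Only the segment and effective address are gathered here; the MXCSR store itself is done by the iemCImpl_vstmxcsr C implementation called below. */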
3413 IEM_MC_BEGIN(2, 0);
3414 IEM_MC_ARG(uint8_t, iEffSeg, 0);
3415 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3416 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3417 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3418 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3419 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3420 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
3421 IEM_MC_END();
3422 return VINF_SUCCESS;
3423}
3424
3425/* Opcode VEX.0F 0xae mem/4 - invalid. */
3426/* Opcode VEX.0F 0xae mem/5 - invalid. */
3427/* Opcode VEX.0F 0xae mem/6 - invalid. */
3428/* Opcode VEX.0F 0xae mem/7 - invalid. */
3429
3430/* Opcode VEX.0F 0xae 11b/0 - invalid. */
3431/* Opcode VEX.0F 0xae 11b/1 - invalid. */
3432/* Opcode VEX.0F 0xae 11b/2 - invalid. */
3433/* Opcode VEX.0F 0xae 11b/3 - invalid. */
3434/* Opcode VEX.0F 0xae 11b/4 - invalid. */
3435/* Opcode VEX.0F 0xae 11b/5 - invalid. */
3436/* Opcode VEX.0F 0xae 11b/6 - invalid. */
3437/* Opcode VEX.0F 0xae 11b/7 - invalid. */
3438
3439/**
3440 * Vex group 15 jump table for memory variant.
3441 */
3442IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
3443{ /* pfx: none, 066h, 0f3h, 0f2h */
3444 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3445 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3446 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3447 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3448 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3449 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3450 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3451 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3452};
3453AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
3454
3455
3456/** Opcode VEX.0F 0xae (VEX group 15). */
3457FNIEMOP_DEF(iemOp_VGrp15)
3458{
3459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3460 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3461 /* register, register */
3462 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
3463
3464 /* memory, register */
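    /* The table is indexed as [ModR/M.reg * 4 + SIMD prefix], matching the four prefix columns (none, 066h, 0f3h, 0f2h) of g_apfnVexGroup15MemReg. */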
3465 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3466 + pVCpu->iem.s.idxPrefix], bRm);
3467}
3468
3469
3470/* Opcode VEX.0F 0xaf - invalid. */
3471
3472/* Opcode VEX.0F 0xb0 - invalid. */
3473/* Opcode VEX.0F 0xb1 - invalid. */
3474/* Opcode VEX.0F 0xb2 - invalid. */
3476/* Opcode VEX.0F 0xb3 - invalid. */
3477/* Opcode VEX.0F 0xb4 - invalid. */
3478/* Opcode VEX.0F 0xb5 - invalid. */
3479/* Opcode VEX.0F 0xb6 - invalid. */
3480/* Opcode VEX.0F 0xb7 - invalid. */
3481/* Opcode VEX.0F 0xb8 - invalid. */
3482/* Opcode VEX.0F 0xb9 - invalid. */
3483/* Opcode VEX.0F 0xba - invalid. */
3484/* Opcode VEX.0F 0xbb - invalid. */
3485/* Opcode VEX.0F 0xbc - invalid. */
3486/* Opcode VEX.0F 0xbd - invalid. */
3487/* Opcode VEX.0F 0xbe - invalid. */
3488/* Opcode VEX.0F 0xbf - invalid. */
3489
3490/* Opcode VEX.0F 0xc0 - invalid. */
3491/* Opcode VEX.66.0F 0xc0 - invalid. */
3492/* Opcode VEX.F3.0F 0xc0 - invalid. */
3493/* Opcode VEX.F2.0F 0xc0 - invalid. */
3494
3495/* Opcode VEX.0F 0xc1 - invalid. */
3496/* Opcode VEX.66.0F 0xc1 - invalid. */
3497/* Opcode VEX.F3.0F 0xc1 - invalid. */
3498/* Opcode VEX.F2.0F 0xc1 - invalid. */
3499
3500/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
3501FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
3502/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
3503FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
3504/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
3505FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
3506/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
3507FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
3508
3509/* Opcode VEX.0F 0xc3 - invalid */
3510/* Opcode VEX.66.0F 0xc3 - invalid */
3511/* Opcode VEX.F3.0F 0xc3 - invalid */
3512/* Opcode VEX.F2.0F 0xc3 - invalid */
3513
3514/* Opcode VEX.0F 0xc4 - invalid */
3515/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
3516FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
3517/* Opcode VEX.F3.0F 0xc4 - invalid */
3518/* Opcode VEX.F2.0F 0xc4 - invalid */
3519
3520/* Opcode VEX.0F 0xc5 - invalid */
3521/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
3522FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
3523/* Opcode VEX.F3.0F 0xc5 - invalid */
3524/* Opcode VEX.F2.0F 0xc5 - invalid */
3525
3526/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
3527FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
3528/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
3529FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
3530/* Opcode VEX.F3.0F 0xc6 - invalid */
3531/* Opcode VEX.F2.0F 0xc6 - invalid */
3532
3533/* Opcode VEX.0F 0xc7 - invalid */
3534/* Opcode VEX.66.0F 0xc7 - invalid */
3535/* Opcode VEX.F3.0F 0xc7 - invalid */
3536/* Opcode VEX.F2.0F 0xc7 - invalid */
3537
3538/* Opcode VEX.0F 0xc8 - invalid */
3539/* Opcode VEX.0F 0xc9 - invalid */
3540/* Opcode VEX.0F 0xca - invalid */
3541/* Opcode VEX.0F 0xcb - invalid */
3542/* Opcode VEX.0F 0xcc - invalid */
3543/* Opcode VEX.0F 0xcd - invalid */
3544/* Opcode VEX.0F 0xce - invalid */
3545/* Opcode VEX.0F 0xcf - invalid */
3546
3547
3548/* Opcode VEX.0F 0xd0 - invalid */
3549/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
3550FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
3551/* Opcode VEX.F3.0F 0xd0 - invalid */
3552/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
3553FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
3554
3555/* Opcode VEX.0F 0xd1 - invalid */
3556/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
3557FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
3558/* Opcode VEX.F3.0F 0xd1 - invalid */
3559/* Opcode VEX.F2.0F 0xd1 - invalid */
3560
3561/* Opcode VEX.0F 0xd2 - invalid */
3562/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
3563FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
3564/* Opcode VEX.F3.0F 0xd2 - invalid */
3565/* Opcode VEX.F2.0F 0xd2 - invalid */
3566
3567/* Opcode VEX.0F 0xd3 - invalid */
3568/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
3569FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
3570/* Opcode VEX.F3.0F 0xd3 - invalid */
3571/* Opcode VEX.F2.0F 0xd3 - invalid */
3572
3573/* Opcode VEX.0F 0xd4 - invalid */
3574
3575
3576/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
3577FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
3578{
3579 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
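    /* IEM_SELECT_HOST_OR_FALLBACK picks the native g_iemAImpl_vpaddq helper when the host CPU provides AVX2 and the portable g_iemAImpl_vpaddq_fallback otherwise. */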
3580 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3581 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpaddq, &g_iemAImpl_vpaddq_fallback));
3582}
3583
3584
3585/* Opcode VEX.F3.0F 0xd4 - invalid */
3586/* Opcode VEX.F2.0F 0xd4 - invalid */
3587
3588/* Opcode VEX.0F 0xd5 - invalid */
3589/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
3590FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
3591/* Opcode VEX.F3.0F 0xd5 - invalid */
3592/* Opcode VEX.F2.0F 0xd5 - invalid */
3593
3594/* Opcode VEX.0F 0xd6 - invalid */
3595
3596/**
3597 * @opcode 0xd6
3598 * @oppfx 0x66
3599 * @opcpuid avx
3600 * @opgroup og_avx_pcksclr_datamove
3601 * @opxcpttype none
3602 * @optest op1=-1 op2=2 -> op1=2
3603 * @optest op1=0 op2=-42 -> op1=-42
3604 */
3605FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
3606{
3607 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3609 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3610 {
3611 /*
3612 * Register, register.
3613 */
3614 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3615 IEM_MC_BEGIN(0, 0);
3616
3617 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3618 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
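        /* Copy the low quadword and zero the destination up to VLMAX, as VEX-encoded register stores clear the upper bits. */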
3619
3620 IEM_MC_COPY_YREG_U64_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3621 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3622 IEM_MC_ADVANCE_RIP();
3623 IEM_MC_END();
3624 }
3625 else
3626 {
3627 /*
3628 * Memory, register.
3629 */
3630 IEM_MC_BEGIN(0, 2);
3631 IEM_MC_LOCAL(uint64_t, uSrc);
3632 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3633
3634 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3635 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3636 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3637 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3638
3639 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3640 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3641
3642 IEM_MC_ADVANCE_RIP();
3643 IEM_MC_END();
3644 }
3645 return VINF_SUCCESS;
3646}
3647
3648/* Opcode VEX.F3.0F 0xd6 - invalid */
3649/* Opcode VEX.F2.0F 0xd6 - invalid */
3650
3651
3652/* Opcode VEX.0F 0xd7 - invalid */
3653
3654/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
3655FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
3656{
3657 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3658 /* Docs says register only. */
3659 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
3660 {
3661        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
3662 IEMOP_MNEMONIC2(RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
3663 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
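        /* VEX.L=1 selects the 256-bit (YMM) source form, VEX.L=0 the 128-bit (XMM) form. */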
3664 if (pVCpu->iem.s.uVexLength)
3665 {
3666 IEM_MC_BEGIN(2, 1);
3667 IEM_MC_ARG(uint64_t *, puDst, 0);
3668 IEM_MC_LOCAL(RTUINT256U, uSrc);
3669 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3670 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3671 IEM_MC_PREPARE_AVX_USAGE();
3672 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3673 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3674 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
3675 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
3676 IEM_MC_ADVANCE_RIP();
3677 IEM_MC_END();
3678 }
3679 else
3680 {
3681 IEM_MC_BEGIN(2, 0);
3682 IEM_MC_ARG(uint64_t *, puDst, 0);
3683 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3684 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3685 IEM_MC_PREPARE_AVX_USAGE();
3686 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3687 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3688 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
3689 IEM_MC_ADVANCE_RIP();
3690 IEM_MC_END();
3691 }
3692 return VINF_SUCCESS;
3693 }
3694 return IEMOP_RAISE_INVALID_OPCODE();
3695}
3696
3697
3698/* Opcode VEX.F3.0F 0xd7 - invalid */
3699/* Opcode VEX.F2.0F 0xd7 - invalid */
3700
3701
3702/* Opcode VEX.0F 0xd8 - invalid */
3703/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
3704FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
3705/* Opcode VEX.F3.0F 0xd8 - invalid */
3706/* Opcode VEX.F2.0F 0xd8 - invalid */
3707
3708/* Opcode VEX.0F 0xd9 - invalid */
3709/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
3710FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
3711/* Opcode VEX.F3.0F 0xd9 - invalid */
3712/* Opcode VEX.F2.0F 0xd9 - invalid */
3713
3714/* Opcode VEX.0F 0xda - invalid */
3715/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
3716FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
3717/* Opcode VEX.F3.0F 0xda - invalid */
3718/* Opcode VEX.F2.0F 0xda - invalid */
3719
3720/* Opcode VEX.0F 0xdb - invalid */
3721
3722
3723/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
3724FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
3725{
3726 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3727 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3728 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3729}
3730
3731
3732/* Opcode VEX.F3.0F 0xdb - invalid */
3733/* Opcode VEX.F2.0F 0xdb - invalid */
3734
3735/* Opcode VEX.0F 0xdc - invalid */
3736/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
3737FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
3738/* Opcode VEX.F3.0F 0xdc - invalid */
3739/* Opcode VEX.F2.0F 0xdc - invalid */
3740
3741/* Opcode VEX.0F 0xdd - invalid */
3742/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
3743FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
3744/* Opcode VEX.F3.0F 0xdd - invalid */
3745/* Opcode VEX.F2.0F 0xdd - invalid */
3746
3747/* Opcode VEX.0F 0xde - invalid */
3748/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, W */
3749FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
3750/* Opcode VEX.F3.0F 0xde - invalid */
3751/* Opcode VEX.F2.0F 0xde - invalid */
3752
3753/* Opcode VEX.0F 0xdf - invalid */
3754
3755
3756/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
3757FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
3758{
3759 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3760 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3761 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3762}
3763
3764
3765/* Opcode VEX.F3.0F 0xdf - invalid */
3766/* Opcode VEX.F2.0F 0xdf - invalid */
3767
3768/* Opcode VEX.0F 0xe0 - invalid */
3769/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
3770FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
3771/* Opcode VEX.F3.0F 0xe0 - invalid */
3772/* Opcode VEX.F2.0F 0xe0 - invalid */
3773
3774/* Opcode VEX.0F 0xe1 - invalid */
3775/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
3776FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
3777/* Opcode VEX.F3.0F 0xe1 - invalid */
3778/* Opcode VEX.F2.0F 0xe1 - invalid */
3779
3780/* Opcode VEX.0F 0xe2 - invalid */
3781/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
3782FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
3783/* Opcode VEX.F3.0F 0xe2 - invalid */
3784/* Opcode VEX.F2.0F 0xe2 - invalid */
3785
3786/* Opcode VEX.0F 0xe3 - invalid */
3787/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
3788FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
3789/* Opcode VEX.F3.0F 0xe3 - invalid */
3790/* Opcode VEX.F2.0F 0xe3 - invalid */
3791
3792/* Opcode VEX.0F 0xe4 - invalid */
3793/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, W */
3794FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
3795/* Opcode VEX.F3.0F 0xe4 - invalid */
3796/* Opcode VEX.F2.0F 0xe4 - invalid */
3797
3798/* Opcode VEX.0F 0xe5 - invalid */
3799/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
3800FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
3801/* Opcode VEX.F3.0F 0xe5 - invalid */
3802/* Opcode VEX.F2.0F 0xe5 - invalid */
3803
3804/* Opcode VEX.0F 0xe6 - invalid */
3805/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
3806FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
3807/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
3808FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
3809/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
3810FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
3811
3812
3813/* Opcode VEX.0F 0xe7 - invalid */
3814
3815/**
3816 * @opcode 0xe7
3817 * @opcodesub !11 mr/reg
3818 * @oppfx 0x66
3819 * @opcpuid avx
3820 * @opgroup og_avx_cachect
3821 * @opxcpttype 1
3822 * @optest op1=-1 op2=2 -> op1=2
3823 * @optest op1=0 op2=-42 -> op1=-42
3824 */
3825FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
3826{
3827 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3828 Assert(pVCpu->iem.s.uVexLength <= 1);
3829 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3830 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3831 {
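        /* VMOVNTDQ requires an aligned memory operand, hence the aligned 16-byte (XMM) and 32-byte (YMM) store helpers below. */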
3832 if (pVCpu->iem.s.uVexLength == 0)
3833 {
3834 /*
3835 * 128-bit: Memory, register.
3836 */
3837 IEM_MC_BEGIN(0, 2);
3838 IEM_MC_LOCAL(RTUINT128U, uSrc);
3839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3840
3841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3842 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3844 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3845
3846 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3847 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3848
3849 IEM_MC_ADVANCE_RIP();
3850 IEM_MC_END();
3851 }
3852 else
3853 {
3854 /*
3855 * 256-bit: Memory, register.
3856 */
3857 IEM_MC_BEGIN(0, 2);
3858 IEM_MC_LOCAL(RTUINT256U, uSrc);
3859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3860
3861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3862 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3863 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3864 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3865
3866 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3867 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3868
3869 IEM_MC_ADVANCE_RIP();
3870 IEM_MC_END();
3871 }
3872 return VINF_SUCCESS;
3873 }
3874 /**
3875 * @opdone
3876 * @opmnemonic udvex660fe7reg
3877 * @opcode 0xe7
3878 * @opcodesub 11 mr/reg
3879 * @oppfx 0x66
3880 * @opunused immediate
3881 * @opcpuid avx
3882 * @optest ->
3883 */
3884 return IEMOP_RAISE_INVALID_OPCODE();
3885}
3886
3887/* Opcode VEX.F3.0F 0xe7 - invalid */
3888/* Opcode VEX.F2.0F 0xe7 - invalid */
3889
3890
3891/* Opcode VEX.0F 0xe8 - invalid */
3892/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
3893FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
3894/* Opcode VEX.F3.0F 0xe8 - invalid */
3895/* Opcode VEX.F2.0F 0xe8 - invalid */
3896
3897/* Opcode VEX.0F 0xe9 - invalid */
3898/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
3899FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
3900/* Opcode VEX.F3.0F 0xe9 - invalid */
3901/* Opcode VEX.F2.0F 0xe9 - invalid */
3902
3903/* Opcode VEX.0F 0xea - invalid */
3904/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
3905FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
3906/* Opcode VEX.F3.0F 0xea - invalid */
3907/* Opcode VEX.F2.0F 0xea - invalid */
3908
3909/* Opcode VEX.0F 0xeb - invalid */
3910
3911
3912/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
3913FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
3914{
3915 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3916 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3917 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3918}
3919
3920
3921
3922/* Opcode VEX.F3.0F 0xeb - invalid */
3923/* Opcode VEX.F2.0F 0xeb - invalid */
3924
3925/* Opcode VEX.0F 0xec - invalid */
3926/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
3927FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
3928/* Opcode VEX.F3.0F 0xec - invalid */
3929/* Opcode VEX.F2.0F 0xec - invalid */
3930
3931/* Opcode VEX.0F 0xed - invalid */
3932/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
3933FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
3934/* Opcode VEX.F3.0F 0xed - invalid */
3935/* Opcode VEX.F2.0F 0xed - invalid */
3936
3937/* Opcode VEX.0F 0xee - invalid */
3938/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, W */
3939FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
3940/* Opcode VEX.F3.0F 0xee - invalid */
3941/* Opcode VEX.F2.0F 0xee - invalid */
3942
3943
3944/* Opcode VEX.0F 0xef - invalid */
3945
3946
3947/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
3948FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
3949{
3950 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3951 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3952 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3953}
3954
3955
3956/* Opcode VEX.F3.0F 0xef - invalid */
3957/* Opcode VEX.F2.0F 0xef - invalid */
3958
3959/* Opcode VEX.0F 0xf0 - invalid */
3960/* Opcode VEX.66.0F 0xf0 - invalid */
3961/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
3962FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
3963
3964/* Opcode VEX.0F 0xf1 - invalid */
3965/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
3966FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
3967/* Opcode VEX.F2.0F 0xf1 - invalid */
3968
3969/* Opcode VEX.0F 0xf2 - invalid */
3970/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
3971FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
3972/* Opcode VEX.F2.0F 0xf2 - invalid */
3973
3974/* Opcode VEX.0F 0xf3 - invalid */
3975/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
3976FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
3977/* Opcode VEX.F2.0F 0xf3 - invalid */
3978
3979/* Opcode VEX.0F 0xf4 - invalid */
3980/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
3981FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
3982/* Opcode VEX.F2.0F 0xf4 - invalid */
3983
3984/* Opcode VEX.0F 0xf5 - invalid */
3985/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
3986FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
3987/* Opcode VEX.F2.0F 0xf5 - invalid */
3988
3989/* Opcode VEX.0F 0xf6 - invalid */
3990/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
3991FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
3992/* Opcode VEX.F2.0F 0xf6 - invalid */
3993
3994/* Opcode VEX.0F 0xf7 - invalid */
3995/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
3996FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
3997/* Opcode VEX.F2.0F 0xf7 - invalid */
3998
3999/* Opcode VEX.0F 0xf8 - invalid */
4000
4001
4002/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
4003FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
4004{
4005 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4006 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4007 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpsubb, &g_iemAImpl_vpsubb_fallback));
4008}
4009
4010
4011/* Opcode VEX.F2.0F 0xf8 - invalid */
4012
4013/* Opcode VEX.0F 0xf9 - invalid */
4014
4015
4016/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
4017FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
4018{
4019 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4020 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4021 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpsubw, &g_iemAImpl_vpsubw_fallback));
4022}
4023
4024
4025/* Opcode VEX.F2.0F 0xf9 - invalid */
4026
4027/* Opcode VEX.0F 0xfa - invalid */
4028
4029
4030/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
4031FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
4032{
4033 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4034 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4035 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpsubd, &g_iemAImpl_vpsubd_fallback));
4036}
4037
4038
4039/* Opcode VEX.F2.0F 0xfa - invalid */
4040
4041/* Opcode VEX.0F 0xfb - invalid */
4042
4043
4044/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
4045FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
4046{
4047 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4048 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4049 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpsubq, &g_iemAImpl_vpsubq_fallback));
4050}
4051
4052
4053/* Opcode VEX.F2.0F 0xfb - invalid */
4054
4055/* Opcode VEX.0F 0xfc - invalid */
4056
4057
4058/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
4059FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
4060{
4061 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4062 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4063 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpaddb, &g_iemAImpl_vpaddb_fallback));
4064}
4065
4066
4067/* Opcode VEX.F2.0F 0xfc - invalid */
4068
4069/* Opcode VEX.0F 0xfd - invalid */
4070
4071
4072/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
4073FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
4074{
4075 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4076 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4077 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpaddw, &g_iemAImpl_vpaddw_fallback));
4078}
4079
4080
4081/* Opcode VEX.F2.0F 0xfd - invalid */
4082
4083/* Opcode VEX.0F 0xfe - invalid */
4084
4085
4086/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
4087FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
4088{
4089 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4090 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4091 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpaddd, &g_iemAImpl_vpaddd_fallback));
4092}
4093
4094
4095/* Opcode VEX.F2.0F 0xfe - invalid */
4096
4097
4098/** Opcode **** 0x0f 0xff - UD0 */
4099FNIEMOP_DEF(iemOp_vud0)
4100{
4101 IEMOP_MNEMONIC(vud0, "vud0");
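    /* On Intel CPUs UD0 consumes a ModR/M byte (and any effective address bytes) before raising #UD; other vendors raise it on the opcode alone. */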
4102 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
4103 {
4104 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
4105#ifndef TST_IEM_CHECK_MC
4106 RTGCPTR GCPtrEff;
4107 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
4108 if (rcStrict != VINF_SUCCESS)
4109 return rcStrict;
4110#endif
4111 IEMOP_HLP_DONE_DECODING();
4112 }
4113 return IEMOP_RAISE_INVALID_OPCODE();
4114}
4115
4116
4117
4118/**
4119 * VEX opcode map \#1.
4120 *
4121 * @sa g_apfnTwoByteMap
4122 */
4123IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
4124{
4125 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
4126 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
4127 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
4128 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
4129 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
4130 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
4131 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
4132 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
4133 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
4134 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
4135 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
4136 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
4137 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
4138 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
4139 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
4140 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
4141 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
4142
4143 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
4144 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
4145 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
4146 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4147 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4148 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4149 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
4150 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4151 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
4152 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
4153 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
4154 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
4155 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
4156 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
4157 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
4158 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
4159
4160 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
4161 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
4162 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
4163 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
4164 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
4165 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
4166 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
4167 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
4168 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4169 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4170 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
4171 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4172 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
4173 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
4174 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4175 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4176
4177 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
4178 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
4179 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
4180 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
4181 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
4182 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
4183 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
4184 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
4185 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4186 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4187 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4188 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4189 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4190 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4191 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4192 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
4193
4194 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
4195 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
4196 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
4197 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
4198 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
4199 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
4200 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
4201 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
4202 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
4203 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
4204 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
4205 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
4206 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
4207 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
4208 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
4209 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
4210
4211 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4212 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
4213 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4214 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4215 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4216 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4217 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4218 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4219 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
4220 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
4221 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
4222 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
4223 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
4224 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
4225 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
4226 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
4227
4228 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4229 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4230 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4231 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4232 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4233 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4234 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4235 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4236 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4237 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4238 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4239 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4240 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4241 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4242 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4243 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
4244
4245 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
4246 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4247 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4248 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4249 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4250 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4251 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4252 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4253 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
4254 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
4255 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
4256 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
4257 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
4258 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
4259 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
4260 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
4261
4262 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
4263 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
4264 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
4265 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
4266 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
4267 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
4268 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
4269 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
4270 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
4271 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
4272 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
4273 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
4274 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
4275 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
4276 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
4277 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
4278
4279 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
4280 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
4281 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
4282 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
4283 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
4284 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
4285 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
4286 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
4287 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
4288 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
4289 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
4290 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
4291 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
4292 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
4293 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
4294 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
4295
4296 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4297 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4298 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4299 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4300 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4301 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4302 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4303 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4304 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4305 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4306 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
4307 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
4308 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
4309 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
4310 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
4311 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
4312
4313 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4314 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4315 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4316 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4317 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4318 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4319 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4320 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4321 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4322 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4323 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
4324 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
4325 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
4326 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
4327 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
4328 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
4329
4330 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4331 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4332 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
4333 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4334 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4335 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4336 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
4337 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4338 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4339 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4340 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
4341 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
4342 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
4343 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
4344 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
4345 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
4346
4347 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
4348 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4349 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4350 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4351 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4352 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4353 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4354 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4355 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4356 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4357 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4358 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4359 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4360 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4361 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4362 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4363
4364 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4365 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4366 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4367 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4368 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4369 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4370 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
4371 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4372 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4373 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4374 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4375 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4376 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4377 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4378 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4379 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4380
4381 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
4382 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4383 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4384 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4385 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4386 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4387 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4388 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4389 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4390 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4391 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4392 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4393 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4394 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4395 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4396 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
4397};
4398AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
4399/** @} */
4400