VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@ 95447

Last change on this file since 95447 was 95441, checked in by vboxsync, 3 years ago

VMM/IEM: [v]andps, [v]andpd, [v]pand, [v]andnps, [v]andnpd, [v]pandn, [v]orps, [v]orpd, and [v]por. bugref:9898

1/* $Id: IEMAllInstructionsVexMap1.cpp.h 95441 2022-06-29 22:40:14Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name VEX Opcode Map 1
23 * @{
24 */
25
26/**
27 * Common worker for AVX2 instructions on the forms:
28 * - vpxxx xmm0, xmm1, xmm2/mem128
29 * - vpxxx ymm0, ymm1, ymm2/mem256
30 *
31 * Exception type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
32 */
33FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
34{
35 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
36 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
37 {
38 /*
39 * Register, register.
40 */
41 if (pVCpu->iem.s.uVexLength)
42 {
43 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
44 IEM_MC_BEGIN(4, 3);
45 IEM_MC_LOCAL(RTUINT256U, uDst);
46 IEM_MC_LOCAL(RTUINT256U, uSrc1);
47 IEM_MC_LOCAL(RTUINT256U, uSrc2);
48 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
49 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
50 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
51 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
52 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
53 IEM_MC_PREPARE_AVX_USAGE();
54 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
55 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
56 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
57 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
58 IEM_MC_ADVANCE_RIP();
59 IEM_MC_END();
60 }
61 else
62 {
63 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
64 IEM_MC_BEGIN(4, 0);
65 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
66 IEM_MC_ARG(PRTUINT128U, puDst, 1);
67 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
68 IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
69 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
70 IEM_MC_PREPARE_AVX_USAGE();
71 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
72 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
73 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
74 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
75 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
76 IEM_MC_ADVANCE_RIP();
77 IEM_MC_END();
78 }
79 }
80 else
81 {
82 /*
83 * Register, memory.
84 */
85 if (pVCpu->iem.s.uVexLength)
86 {
87 IEM_MC_BEGIN(4, 4);
88 IEM_MC_LOCAL(RTUINT256U, uDst);
89 IEM_MC_LOCAL(RTUINT256U, uSrc1);
90 IEM_MC_LOCAL(RTUINT256U, uSrc2);
91 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
92 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
93 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
94 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
95 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
96
97 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
98 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
99 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
100 IEM_MC_PREPARE_AVX_USAGE();
101
102 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
103 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
104 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
105 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
106
107 IEM_MC_ADVANCE_RIP();
108 IEM_MC_END();
109 }
110 else
111 {
112 IEM_MC_BEGIN(4, 2);
113 IEM_MC_LOCAL(RTUINT128U, uSrc2);
114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
115 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
116 IEM_MC_ARG(PRTUINT128U, puDst, 1);
117 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
118 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);
119
120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
121 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
122 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
123 IEM_MC_PREPARE_AVX_USAGE();
124
125 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
126 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
127 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
128 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
129 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
130
131 IEM_MC_ADVANCE_RIP();
132 IEM_MC_END();
133 }
134 }
135 return VINF_SUCCESS;
136}
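/*
 * Usage sketch (illustrative): an opcode handler passes this worker an
 * IEMOPMEDIAF3 table carrying the 128-bit and 256-bit worker pointers, the
 * way vandps does further down in this file:
 *
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
 */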
137
138
139
140/* Opcode VEX.0F 0x00 - invalid */
141/* Opcode VEX.0F 0x01 - invalid */
142/* Opcode VEX.0F 0x02 - invalid */
143/* Opcode VEX.0F 0x03 - invalid */
144/* Opcode VEX.0F 0x04 - invalid */
145/* Opcode VEX.0F 0x05 - invalid */
146/* Opcode VEX.0F 0x06 - invalid */
147/* Opcode VEX.0F 0x07 - invalid */
148/* Opcode VEX.0F 0x08 - invalid */
149/* Opcode VEX.0F 0x09 - invalid */
150/* Opcode VEX.0F 0x0a - invalid */
151
152/** Opcode VEX.0F 0x0b. */
153FNIEMOP_DEF(iemOp_vud2)
154{
155 IEMOP_MNEMONIC(vud2, "vud2");
156 return IEMOP_RAISE_INVALID_OPCODE();
157}
158
159/* Opcode VEX.0F 0x0c - invalid */
160/* Opcode VEX.0F 0x0d - invalid */
161/* Opcode VEX.0F 0x0e - invalid */
162/* Opcode VEX.0F 0x0f - invalid */
163
164
165/**
166 * @opcode 0x10
167 * @oppfx none
168 * @opcpuid avx
169 * @opgroup og_avx_simdfp_datamove
170 * @opxcpttype 4UA
171 * @optest op1=1 op2=2 -> op1=2
172 * @optest op1=0 op2=-22 -> op1=-22
173 */
174FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
175{
176 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
177 Assert(pVCpu->iem.s.uVexLength <= 1);
178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
179 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
180 {
181 /*
182 * Register, register.
183 */
184 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
185 IEM_MC_BEGIN(0, 0);
186 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
187 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
188 if (pVCpu->iem.s.uVexLength == 0)
189 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
190 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
191 else
192 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
193 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
194 IEM_MC_ADVANCE_RIP();
195 IEM_MC_END();
196 }
197 else if (pVCpu->iem.s.uVexLength == 0)
198 {
199 /*
200 * 128-bit: Register, Memory
201 */
202 IEM_MC_BEGIN(0, 2);
203 IEM_MC_LOCAL(RTUINT128U, uSrc);
204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
205
206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
207 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
208 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
209 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
210
211 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
212 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
213
214 IEM_MC_ADVANCE_RIP();
215 IEM_MC_END();
216 }
217 else
218 {
219 /*
220 * 256-bit: Register, Memory
221 */
222 IEM_MC_BEGIN(0, 2);
223 IEM_MC_LOCAL(RTUINT256U, uSrc);
224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
225
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
227 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
228 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
229 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
230
231 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
232 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
233
234 IEM_MC_ADVANCE_RIP();
235 IEM_MC_END();
236 }
237 return VINF_SUCCESS;
238}
239
240
241/**
242 * @opcode 0x10
243 * @oppfx 0x66
244 * @opcpuid avx
245 * @opgroup og_avx_simdfp_datamove
246 * @opxcpttype 4UA
247 * @optest op1=1 op2=2 -> op1=2
248 * @optest op1=0 op2=-22 -> op1=-22
249 */
250FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
251{
252 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
253 Assert(pVCpu->iem.s.uVexLength <= 1);
254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
255 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
256 {
257 /*
258 * Register, register.
259 */
260 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
261 IEM_MC_BEGIN(0, 0);
262 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
263 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
264 if (pVCpu->iem.s.uVexLength == 0)
265 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
266 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
267 else
268 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
269 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
270 IEM_MC_ADVANCE_RIP();
271 IEM_MC_END();
272 }
273 else if (pVCpu->iem.s.uVexLength == 0)
274 {
275 /*
276 * 128-bit: Register, memory.
277 */
278 IEM_MC_BEGIN(0, 2);
279 IEM_MC_LOCAL(RTUINT128U, uSrc);
280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
281
282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
283 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
284 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
285 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
286
287 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
288 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
289
290 IEM_MC_ADVANCE_RIP();
291 IEM_MC_END();
292 }
293 else
294 {
295 /*
296 * 256-bit: Register, memory.
297 */
298 IEM_MC_BEGIN(0, 2);
299 IEM_MC_LOCAL(RTUINT256U, uSrc);
300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
301
302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
303 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
304 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
305 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
306
307 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
308 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
309
310 IEM_MC_ADVANCE_RIP();
311 IEM_MC_END();
312 }
313 return VINF_SUCCESS;
314}
315
316
317FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
318{
319 Assert(pVCpu->iem.s.uVexLength <= 1);
320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
321 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
322 {
323 /**
324 * @opcode 0x10
325 * @oppfx 0xf3
326 * @opcodesub 11 mr/reg
327 * @opcpuid avx
328 * @opgroup og_avx_simdfp_datamerge
329 * @opxcpttype 5
330 * @optest op1=1 op2=0 op3=2 -> op1=2
331 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
332 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
333 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
334 * @note HssHi refers to bits 127:32.
335 */
336 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
337 IEMOP_HLP_DONE_VEX_DECODING();
338 IEM_MC_BEGIN(0, 0);
339
340 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
341 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
342 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
343 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U32*/,
344 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
345 IEM_MC_ADVANCE_RIP();
346 IEM_MC_END();
347 }
348 else
349 {
350 /**
351 * @opdone
352 * @opcode 0x10
353 * @oppfx 0xf3
354 * @opcodesub !11 mr/reg
355 * @opcpuid avx
356 * @opgroup og_avx_simdfp_datamove
357 * @opxcpttype 5
358 * @opfunction iemOp_vmovss_Vss_Hss_Wss
359 * @optest op1=1 op2=2 -> op1=2
360 * @optest op1=0 op2=-22 -> op1=-22
361 */
362 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
363 IEM_MC_BEGIN(0, 2);
364 IEM_MC_LOCAL(uint32_t, uSrc);
365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
366
367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
368 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
369 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
370 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
371
372 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
373 IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
374
375 IEM_MC_ADVANCE_RIP();
376 IEM_MC_END();
377 }
378
379 return VINF_SUCCESS;
380}
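/*
 * Operation sketch for the two vmovss forms above (illustrative):
 *     11 mr/reg:  dst[31:0] = rm[31:0];  dst[127:32] = vvvv[127:32];  dst[VLMAX-1:128] = 0;
 *     !11 mr/reg: dst[31:0] = mem32;     dst[VLMAX-1:32] = 0;
 */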
381
382
383FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
384{
385 Assert(pVCpu->iem.s.uVexLength <= 1);
386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
387 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
388 {
389 /**
390 * @opcode 0x10
391 * @oppfx 0xf2
392 * @opcodesub 11 mr/reg
393 * @opcpuid avx
394 * @opgroup og_avx_simdfp_datamerge
395 * @opxcpttype 5
396 * @optest op1=1 op2=0 op3=2 -> op1=2
397 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
398 * @optest op1=3 op2=-1 op3=0x77 ->
399 * op1=0xffffffffffffffff0000000000000077
400 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
401 */
402 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
403 IEMOP_HLP_DONE_VEX_DECODING();
404 IEM_MC_BEGIN(0, 0);
405
406 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
407 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
408 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
409 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U64*/,
410 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
411 IEM_MC_ADVANCE_RIP();
412 IEM_MC_END();
413 }
414 else
415 {
416 /**
417 * @opdone
418 * @opcode 0x10
419 * @oppfx 0xf2
420 * @opcodesub !11 mr/reg
421 * @opcpuid avx
422 * @opgroup og_avx_simdfp_datamove
423 * @opxcpttype 5
424 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
425 * @optest op1=1 op2=2 -> op1=2
426 * @optest op1=0 op2=-22 -> op1=-22
427 */
428 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
429 IEM_MC_BEGIN(0, 2);
430 IEM_MC_LOCAL(uint64_t, uSrc);
431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
432
433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
434 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
435 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
436 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
437
438 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
439 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
440
441 IEM_MC_ADVANCE_RIP();
442 IEM_MC_END();
443 }
444
445 return VINF_SUCCESS;
446}
447
448
449/**
450 * @opcode 0x11
451 * @oppfx none
452 * @opcpuid avx
453 * @opgroup og_avx_simdfp_datamove
454 * @opxcpttype 4UA
455 * @optest op1=1 op2=2 -> op1=2
456 * @optest op1=0 op2=-22 -> op1=-22
457 */
458FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
459{
460 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
461 Assert(pVCpu->iem.s.uVexLength <= 1);
462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
463 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
464 {
465 /*
466 * Register, register.
467 */
468 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
469 IEM_MC_BEGIN(0, 0);
470 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
471 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
472 if (pVCpu->iem.s.uVexLength == 0)
473 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
474 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
475 else
476 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
477 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
478 IEM_MC_ADVANCE_RIP();
479 IEM_MC_END();
480 }
481 else if (pVCpu->iem.s.uVexLength == 0)
482 {
483 /*
484 * 128-bit: Memory, register.
485 */
486 IEM_MC_BEGIN(0, 2);
487 IEM_MC_LOCAL(RTUINT128U, uSrc);
488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
489
490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
491 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
492 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
493 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
494
495 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
496 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
497
498 IEM_MC_ADVANCE_RIP();
499 IEM_MC_END();
500 }
501 else
502 {
503 /*
504 * 256-bit: Memory, register.
505 */
506 IEM_MC_BEGIN(0, 2);
507 IEM_MC_LOCAL(RTUINT256U, uSrc);
508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
509
510 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
511 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
512 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
513 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
514
515 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
516 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
517
518 IEM_MC_ADVANCE_RIP();
519 IEM_MC_END();
520 }
521 return VINF_SUCCESS;
522}
523
524
525/**
526 * @opcode 0x11
527 * @oppfx 0x66
528 * @opcpuid avx
529 * @opgroup og_avx_simdfp_datamove
530 * @opxcpttype 4UA
531 * @optest op1=1 op2=2 -> op1=2
532 * @optest op1=0 op2=-22 -> op1=-22
533 */
534FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
535{
536 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
537 Assert(pVCpu->iem.s.uVexLength <= 1);
538 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
539 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
540 {
541 /*
542 * Register, register.
543 */
544 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
545 IEM_MC_BEGIN(0, 0);
546 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
547 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
548 if (pVCpu->iem.s.uVexLength == 0)
549 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
550 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
551 else
552 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
553 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
554 IEM_MC_ADVANCE_RIP();
555 IEM_MC_END();
556 }
557 else if (pVCpu->iem.s.uVexLength == 0)
558 {
559 /*
560 * 128-bit: Memory, register.
561 */
562 IEM_MC_BEGIN(0, 2);
563 IEM_MC_LOCAL(RTUINT128U, uSrc);
564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
565
566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
567 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
568 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
569 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
570
571 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
572 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
573
574 IEM_MC_ADVANCE_RIP();
575 IEM_MC_END();
576 }
577 else
578 {
579 /*
580 * 256-bit: Memory, register.
581 */
582 IEM_MC_BEGIN(0, 2);
583 IEM_MC_LOCAL(RTUINT256U, uSrc);
584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
585
586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
587 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
588 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
589 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
590
591 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
592 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
593
594 IEM_MC_ADVANCE_RIP();
595 IEM_MC_END();
596 }
597 return VINF_SUCCESS;
598}
599
600
601FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
602{
603 Assert(pVCpu->iem.s.uVexLength <= 1);
604 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
605 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
606 {
607 /**
608 * @opcode 0x11
609 * @oppfx 0xf3
610 * @opcodesub 11 mr/reg
611 * @opcpuid avx
612 * @opgroup og_avx_simdfp_datamerge
613 * @opxcpttype 5
614 * @optest op1=1 op2=0 op3=2 -> op1=2
615 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
616 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
617 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
618 */
619 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
620 IEMOP_HLP_DONE_VEX_DECODING();
621 IEM_MC_BEGIN(0, 0);
622
623 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
624 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
625 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U32*/,
626 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
627 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
628 IEM_MC_ADVANCE_RIP();
629 IEM_MC_END();
630 }
631 else
632 {
633 /**
634 * @opdone
635 * @opcode 0x11
636 * @oppfx 0xf3
637 * @opcodesub !11 mr/reg
638 * @opcpuid avx
639 * @opgroup og_avx_simdfp_datamove
640 * @opxcpttype 5
641 * @opfunction iemOp_vmovss_Wss_Hss_Vss
642 * @optest op1=1 op2=2 -> op1=2
643 * @optest op1=0 op2=-22 -> op1=-22
644 */
645 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
646 IEM_MC_BEGIN(0, 2);
647 IEM_MC_LOCAL(uint32_t, uSrc);
648 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
649
650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
651 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
652 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
653 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
654
655 IEM_MC_FETCH_YREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
656 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
657
658 IEM_MC_ADVANCE_RIP();
659 IEM_MC_END();
660 }
661
662 return VINF_SUCCESS;
663}
664
665
666FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
667{
668 Assert(pVCpu->iem.s.uVexLength <= 1);
669 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
671 {
672 /**
673 * @opcode 0x11
674 * @oppfx 0xf2
675 * @opcodesub 11 mr/reg
676 * @opcpuid avx
677 * @opgroup og_avx_simdfp_datamerge
678 * @opxcpttype 5
679 * @optest op1=1 op2=0 op3=2 -> op1=2
680 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
681 * @optest op1=3 op2=-1 op3=0x77 ->
682 * op1=0xffffffffffffffff0000000000000077
683 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
684 */
685 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
686 IEMOP_HLP_DONE_VEX_DECODING();
687 IEM_MC_BEGIN(0, 0);
688
689 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
690 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
691 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
692 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
693 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
694 IEM_MC_ADVANCE_RIP();
695 IEM_MC_END();
696 }
697 else
698 {
699 /**
700 * @opdone
701 * @opcode 0x11
702 * @oppfx 0xf2
703 * @opcodesub !11 mr/reg
704 * @opcpuid avx
705 * @opgroup og_avx_simdfp_datamove
706 * @opxcpttype 5
707 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
708 * @optest op1=1 op2=2 -> op1=2
709 * @optest op1=0 op2=-22 -> op1=-22
710 */
711 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
712 IEM_MC_BEGIN(0, 2);
713 IEM_MC_LOCAL(uint64_t, uSrc);
714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
715
716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
717 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
718 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
719 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
720
721 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
722 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
723
724 IEM_MC_ADVANCE_RIP();
725 IEM_MC_END();
726 }
727
728 return VINF_SUCCESS;
729}
730
731
732FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
733{
734 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
735 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
736 {
737 /**
738 * @opcode 0x12
739 * @opcodesub 11 mr/reg
740 * @oppfx none
741 * @opcpuid avx
742 * @opgroup og_avx_simdfp_datamerge
743 * @opxcpttype 7LZ
744 * @optest op2=0x2200220122022203
745 * op3=0x3304330533063307
746 * -> op1=0x22002201220222033304330533063307
747 * @optest op2=-1 op3=-42 -> op1=-42
748 * @note op3 and op2 are only the 8-byte high XMM register halves.
749 */
750 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
751
752 IEMOP_HLP_DONE_VEX_DECODING_L0();
753 IEM_MC_BEGIN(0, 0);
754
755 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
756 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
757 IEM_MC_MERGE_YREG_U64HI_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
758 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
759 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
760
761 IEM_MC_ADVANCE_RIP();
762 IEM_MC_END();
763 }
764 else
765 {
766 /**
767 * @opdone
768 * @opcode 0x12
769 * @opcodesub !11 mr/reg
770 * @oppfx none
771 * @opcpuid avx
772 * @opgroup og_avx_simdfp_datamove
773 * @opxcpttype 5LZ
774 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
775 * @optest op1=1 op2=0 op3=0 -> op1=0
776 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
777 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
778 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
779 */
780 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
781
782 IEM_MC_BEGIN(0, 2);
783 IEM_MC_LOCAL(uint64_t, uSrc);
784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
785
786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
787 IEMOP_HLP_DONE_VEX_DECODING_L0();
788 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
789 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
790
791 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
792 IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
793 uSrc,
794 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
795
796 IEM_MC_ADVANCE_RIP();
797 IEM_MC_END();
798 }
799 return VINF_SUCCESS;
800}
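/*
 * Operation sketch for the two forms above (illustrative):
 *     11 mr/reg (vmovhlps): dst[63:0] = rm[127:64];  dst[127:64] = vvvv[127:64];  dst[VLMAX-1:128] = 0;
 *     !11 mr/reg (vmovlps): dst[63:0] = mem64;       dst[127:64] = vvvv[127:64];  dst[VLMAX-1:128] = 0;
 */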
801
802
803/**
804 * @opcode 0x12
805 * @opcodesub !11 mr/reg
806 * @oppfx 0x66
807 * @opcpuid avx
808 * @opgroup og_avx_pcksclr_datamerge
809 * @opxcpttype 5LZ
810 * @optest op2=0 op3=2 -> op1=2
811 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
812 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
813 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
814 */
815FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
816{
817 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
818 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
819 {
820 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
821
822 IEM_MC_BEGIN(0, 2);
823 IEM_MC_LOCAL(uint64_t, uSrc);
824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
825
826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
827 IEMOP_HLP_DONE_VEX_DECODING_L0();
828 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
829 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
830
831 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
832 IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
833 uSrc,
834 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
835
836 IEM_MC_ADVANCE_RIP();
837 IEM_MC_END();
838 return VINF_SUCCESS;
839 }
840
841 /**
842 * @opdone
843 * @opmnemonic udvex660f12m3
844 * @opcode 0x12
845 * @opcodesub 11 mr/reg
846 * @oppfx 0x66
847 * @opunused immediate
848 * @opcpuid avx
849 * @optest ->
850 */
851 return IEMOP_RAISE_INVALID_OPCODE();
852}
853
854
855/**
856 * @opcode 0x12
857 * @oppfx 0xf3
858 * @opcpuid avx
859 * @opgroup og_avx_pcksclr_datamove
860 * @opxcpttype 4
861 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
862 * -> op1=0x00000002000000020000000100000001
863 * @optest vex.l==1 /
864 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
865 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
866 */
867FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
868{
869 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
870 Assert(pVCpu->iem.s.uVexLength <= 1);
871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
872 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
873 {
874 /*
875 * Register, register.
876 */
877 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
878 if (pVCpu->iem.s.uVexLength == 0)
879 {
880 IEM_MC_BEGIN(2, 0);
881 IEM_MC_ARG(PRTUINT128U, puDst, 0);
882 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
883
884 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
885 IEM_MC_PREPARE_AVX_USAGE();
886
887 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
888 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
889 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
890 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
891
892 IEM_MC_ADVANCE_RIP();
893 IEM_MC_END();
894 }
895 else
896 {
897 IEM_MC_BEGIN(3, 0);
898 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
899 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
900 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2);
901
902 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
903 IEM_MC_PREPARE_AVX_USAGE();
904 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);
905
906 IEM_MC_ADVANCE_RIP();
907 IEM_MC_END();
908 }
909 }
910 else
911 {
912 /*
913 * Register, memory.
914 */
915 if (pVCpu->iem.s.uVexLength == 0)
916 {
917 IEM_MC_BEGIN(2, 2);
918 IEM_MC_LOCAL(RTUINT128U, uSrc);
919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
920 IEM_MC_ARG(PRTUINT128U, puDst, 0);
921 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
922
923 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
924 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
925 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
926 IEM_MC_PREPARE_AVX_USAGE();
927
928 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
929 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
930 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
931 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
932
933 IEM_MC_ADVANCE_RIP();
934 IEM_MC_END();
935 }
936 else
937 {
938 IEM_MC_BEGIN(3, 2);
939 IEM_MC_LOCAL(RTUINT256U, uSrc);
940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
941 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
942 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
943 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
944
945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
946 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
947 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
948 IEM_MC_PREPARE_AVX_USAGE();
949
950 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
951 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);
952
953 IEM_MC_ADVANCE_RIP();
954 IEM_MC_END();
955 }
956 }
957 return VINF_SUCCESS;
958}
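/*
 * Operation sketch (illustrative): vmovsldup duplicates the even-indexed
 * single-precision elements, so for each source dword pair the lower dword is
 * written twice:
 *     dst[31:0] = dst[63:32] = src[31:0];   dst[95:64] = dst[127:96] = src[95:64];
 * and likewise for the upper 128-bit lane when VEX.L=1.
 */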
959
960
961/**
962 * @opcode 0x12
963 * @oppfx 0xf2
964 * @opcpuid avx
965 * @opgroup og_avx_pcksclr_datamove
966 * @opxcpttype 5
967 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
968 * -> op1=0x22222222111111112222222211111111
969 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
970 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
971 */
972FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
973{
974 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
975 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
977 {
978 /*
979 * Register, register.
980 */
981 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
982 if (pVCpu->iem.s.uVexLength == 0)
983 {
984 IEM_MC_BEGIN(2, 0);
985 IEM_MC_ARG(PRTUINT128U, puDst, 0);
986 IEM_MC_ARG(uint64_t, uSrc, 1);
987
988 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
989 IEM_MC_PREPARE_AVX_USAGE();
990
991 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
992 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
993 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
994 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
995
996 IEM_MC_ADVANCE_RIP();
997 IEM_MC_END();
998 }
999 else
1000 {
1001 IEM_MC_BEGIN(3, 0);
1002 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1003 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
1004 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2);
1005
1006 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1007 IEM_MC_PREPARE_AVX_USAGE();
1008 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);
1009
1010 IEM_MC_ADVANCE_RIP();
1011 IEM_MC_END();
1012 }
1013 }
1014 else
1015 {
1016 /*
1017 * Register, memory.
1018 */
1019 if (pVCpu->iem.s.uVexLength == 0)
1020 {
1021 IEM_MC_BEGIN(2, 2);
1022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1023 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1024 IEM_MC_ARG(uint64_t, uSrc, 1);
1025
1026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1027 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1028 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1029 IEM_MC_PREPARE_AVX_USAGE();
1030
1031 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1032 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1033 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1034 IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1035
1036 IEM_MC_ADVANCE_RIP();
1037 IEM_MC_END();
1038 }
1039 else
1040 {
1041 IEM_MC_BEGIN(3, 2);
1042 IEM_MC_LOCAL(RTUINT256U, uSrc);
1043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1044 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1045 IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
1046 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1047
1048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1049 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1050 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1051 IEM_MC_PREPARE_AVX_USAGE();
1052
1053 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1054 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);
1055
1056 IEM_MC_ADVANCE_RIP();
1057 IEM_MC_END();
1058 }
1059 }
1060 return VINF_SUCCESS;
1061}
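/*
 * Operation sketch (illustrative): vmovddup duplicates the low double-precision
 * element of each 128-bit lane:
 *     dst[63:0] = dst[127:64] = src[63:0];
 * and, when VEX.L=1, dst[191:128] = dst[255:192] = src[191:128].
 */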
1062
1063
1064/**
1065 * @opcode 0x13
1066 * @opcodesub !11 mr/reg
1067 * @oppfx none
1068 * @opcpuid avx
1069 * @opgroup og_avx_simdfp_datamove
1070 * @opxcpttype 5
1071 * @optest op1=1 op2=2 -> op1=2
1072 * @optest op1=0 op2=-42 -> op1=-42
1073 */
1074FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1075{
1076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1077 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1078 {
1079 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1080
1081 IEM_MC_BEGIN(0, 2);
1082 IEM_MC_LOCAL(uint64_t, uSrc);
1083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1084
1085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1086 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1087 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1088 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1089
1090 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1091 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1092
1093 IEM_MC_ADVANCE_RIP();
1094 IEM_MC_END();
1095 return VINF_SUCCESS;
1096 }
1097
1098 /**
1099 * @opdone
1100 * @opmnemonic udvex0f13m3
1101 * @opcode 0x13
1102 * @opcodesub 11 mr/reg
1103 * @oppfx none
1104 * @opunused immediate
1105 * @opcpuid avx
1106 * @optest ->
1107 */
1108 return IEMOP_RAISE_INVALID_OPCODE();
1109}
1110
1111
1112/**
1113 * @opcode 0x13
1114 * @opcodesub !11 mr/reg
1115 * @oppfx 0x66
1116 * @opcpuid avx
1117 * @opgroup og_avx_pcksclr_datamove
1118 * @opxcpttype 5
1119 * @optest op1=1 op2=2 -> op1=2
1120 * @optest op1=0 op2=-42 -> op1=-42
1121 */
1122FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1123{
1124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1125 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1126 {
1127 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1128 IEM_MC_BEGIN(0, 2);
1129 IEM_MC_LOCAL(uint64_t, uSrc);
1130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1131
1132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1133 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1134 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1135 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1136
1137 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1138 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1139
1140 IEM_MC_ADVANCE_RIP();
1141 IEM_MC_END();
1142 return VINF_SUCCESS;
1143 }
1144
1145 /**
1146 * @opdone
1147 * @opmnemonic udvex660f13m3
1148 * @opcode 0x13
1149 * @opcodesub 11 mr/reg
1150 * @oppfx 0x66
1151 * @opunused immediate
1152 * @opcpuid avx
1153 * @optest ->
1154 */
1155 return IEMOP_RAISE_INVALID_OPCODE();
1156}
1157
1158/* Opcode VEX.F3.0F 0x13 - invalid */
1159/* Opcode VEX.F2.0F 0x13 - invalid */
1160
1161/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1162FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
1163/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1164FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
1165/* Opcode VEX.F3.0F 0x14 - invalid */
1166/* Opcode VEX.F2.0F 0x14 - invalid */
1167/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1168FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
1169/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1170FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
1171/* Opcode VEX.F3.0F 0x15 - invalid */
1172/* Opcode VEX.F2.0F 0x15 - invalid */
1173/** Opcode VEX.0F 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
1174FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
1175/** Opcode VEX.66.0F 0x16 - vmovhpdv1 Vdq, Hq, Mq */
1176FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
1177/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1178FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
1179/* Opcode VEX.F2.0F 0x16 - invalid */
1180/** Opcode VEX.0F 0x17 - vmovhpsv1 Mq, Vq */
1181FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
1182/** Opcode VEX.66.0F 0x17 - vmovhpdv1 Mq, Vq */
1183FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
1184/* Opcode VEX.F3.0F 0x17 - invalid */
1185/* Opcode VEX.F2.0F 0x17 - invalid */
1186
1187
1188/* Opcode VEX.0F 0x18 - invalid */
1189/* Opcode VEX.0F 0x19 - invalid */
1190/* Opcode VEX.0F 0x1a - invalid */
1191/* Opcode VEX.0F 0x1b - invalid */
1192/* Opcode VEX.0F 0x1c - invalid */
1193/* Opcode VEX.0F 0x1d - invalid */
1194/* Opcode VEX.0F 0x1e - invalid */
1195/* Opcode VEX.0F 0x1f - invalid */
1196
1197/* Opcode VEX.0F 0x20 - invalid */
1198/* Opcode VEX.0F 0x21 - invalid */
1199/* Opcode VEX.0F 0x22 - invalid */
1200/* Opcode VEX.0F 0x23 - invalid */
1201/* Opcode VEX.0F 0x24 - invalid */
1202/* Opcode VEX.0F 0x25 - invalid */
1203/* Opcode VEX.0F 0x26 - invalid */
1204/* Opcode VEX.0F 0x27 - invalid */
1205
1206/**
1207 * @opcode 0x28
1208 * @oppfx none
1209 * @opcpuid avx
1210 * @opgroup og_avx_pcksclr_datamove
1211 * @opxcpttype 1
1212 * @optest op1=1 op2=2 -> op1=2
1213 * @optest op1=0 op2=-42 -> op1=-42
1214 * @note Almost identical to vmovapd.
1215 */
1216FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1217{
1218 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1220 Assert(pVCpu->iem.s.uVexLength <= 1);
1221 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1222 {
1223 /*
1224 * Register, register.
1225 */
1226 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1227 IEM_MC_BEGIN(1, 0);
1228
1229 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1230 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1231 if (pVCpu->iem.s.uVexLength == 0)
1232 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1233 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1234 else
1235 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1236 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1237 IEM_MC_ADVANCE_RIP();
1238 IEM_MC_END();
1239 }
1240 else
1241 {
1242 /*
1243 * Register, memory.
1244 */
1245 if (pVCpu->iem.s.uVexLength == 0)
1246 {
1247 IEM_MC_BEGIN(0, 2);
1248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1249 IEM_MC_LOCAL(RTUINT128U, uSrc);
1250
1251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1252 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1253 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1254 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1255
1256 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1257 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1258
1259 IEM_MC_ADVANCE_RIP();
1260 IEM_MC_END();
1261 }
1262 else
1263 {
1264 IEM_MC_BEGIN(0, 2);
1265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1266 IEM_MC_LOCAL(RTUINT256U, uSrc);
1267
1268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1269 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1270 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1271 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1272
1273 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1274 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1275
1276 IEM_MC_ADVANCE_RIP();
1277 IEM_MC_END();
1278 }
1279 }
1280 return VINF_SUCCESS;
1281}
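/*
 * Note (illustrative): unlike vmovups above, the memory forms here go through
 * the aligned access helpers (IEM_MC_FETCH_MEM_U128_ALIGN_SSE and
 * IEM_MC_FETCH_MEM_U256_ALIGN_AVX), so a misaligned operand faults (#GP)
 * instead of being loaded.
 */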
1282
1283
1284/**
1285 * @opcode 0x28
1286 * @oppfx 66
1287 * @opcpuid avx
1288 * @opgroup og_avx_pcksclr_datamove
1289 * @opxcpttype 1
1290 * @optest op1=1 op2=2 -> op1=2
1291 * @optest op1=0 op2=-42 -> op1=-42
1292 * @note Almost identical to vmovaps
1293 */
1294FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1295{
1296 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1298 Assert(pVCpu->iem.s.uVexLength <= 1);
1299 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1300 {
1301 /*
1302 * Register, register.
1303 */
1304 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1305 IEM_MC_BEGIN(1, 0);
1306
1307 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1308 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1309 if (pVCpu->iem.s.uVexLength == 0)
1310 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1311 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1312 else
1313 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1314 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1315 IEM_MC_ADVANCE_RIP();
1316 IEM_MC_END();
1317 }
1318 else
1319 {
1320 /*
1321 * Register, memory.
1322 */
1323 if (pVCpu->iem.s.uVexLength == 0)
1324 {
1325 IEM_MC_BEGIN(0, 2);
1326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1327 IEM_MC_LOCAL(RTUINT128U, uSrc);
1328
1329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1330 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1331 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1332 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1333
1334 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1335 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1336
1337 IEM_MC_ADVANCE_RIP();
1338 IEM_MC_END();
1339 }
1340 else
1341 {
1342 IEM_MC_BEGIN(0, 2);
1343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1344 IEM_MC_LOCAL(RTUINT256U, uSrc);
1345
1346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1347 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1348 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1349 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1350
1351 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1352 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1353
1354 IEM_MC_ADVANCE_RIP();
1355 IEM_MC_END();
1356 }
1357 }
1358 return VINF_SUCCESS;
1359}
1360
1361/**
1362 * @opmnemonic udvexf30f28
1363 * @opcode 0x28
1364 * @oppfx 0xf3
1365 * @opunused vex.modrm
1366 * @opcpuid avx
1367 * @optest ->
1368 * @opdone
1369 */
1370
1371/**
1372 * @opmnemonic udvexf20f28
1373 * @opcode 0x28
1374 * @oppfx 0xf2
1375 * @opunused vex.modrm
1376 * @opcpuid avx
1377 * @optest ->
1378 * @opdone
1379 */
1380
1381/**
1382 * @opcode 0x29
1383 * @oppfx none
1384 * @opcpuid avx
1385 * @opgroup og_avx_pcksclr_datamove
1386 * @opxcpttype 1
1387 * @optest op1=1 op2=2 -> op1=2
1388 * @optest op1=0 op2=-42 -> op1=-42
1389 * @note Almost identical to vmovapd.
1390 */
1391FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1392{
1393 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1395 Assert(pVCpu->iem.s.uVexLength <= 1);
1396 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1397 {
1398 /*
1399 * Register, register.
1400 */
1401 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1402 IEM_MC_BEGIN(1, 0);
1403
1404 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1405 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1406 if (pVCpu->iem.s.uVexLength == 0)
1407 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1408 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1409 else
1410 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1411 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1412 IEM_MC_ADVANCE_RIP();
1413 IEM_MC_END();
1414 }
1415 else
1416 {
1417 /*
1418 * Register, memory.
1419 */
1420 if (pVCpu->iem.s.uVexLength == 0)
1421 {
1422 IEM_MC_BEGIN(0, 2);
1423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1424 IEM_MC_LOCAL(RTUINT128U, uSrc);
1425
1426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1427 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1428 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1429 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1430
1431 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1432 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1433
1434 IEM_MC_ADVANCE_RIP();
1435 IEM_MC_END();
1436 }
1437 else
1438 {
1439 IEM_MC_BEGIN(0, 2);
1440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1441 IEM_MC_LOCAL(RTUINT256U, uSrc);
1442
1443 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1444 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1445 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1446 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1447
1448 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1449 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1450
1451 IEM_MC_ADVANCE_RIP();
1452 IEM_MC_END();
1453 }
1454 }
1455 return VINF_SUCCESS;
1456}
1457
1458/**
1459 * @opcode 0x29
1460 * @oppfx 66
1461 * @opcpuid avx
1462 * @opgroup og_avx_pcksclr_datamove
1463 * @opxcpttype 1
1464 * @optest op1=1 op2=2 -> op1=2
1465 * @optest op1=0 op2=-42 -> op1=-42
1466 * @note Almost identical to vmovaps
1467 */
1468FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
1469{
1470 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1471 Assert(pVCpu->iem.s.uVexLength <= 1);
1472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1473 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1474 {
1475 /*
1476 * Register, register.
1477 */
1478 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1479 IEM_MC_BEGIN(1, 0);
1480
1481 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1482 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1483 if (pVCpu->iem.s.uVexLength == 0)
1484 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1485 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1486 else
1487 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1488 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1489 IEM_MC_ADVANCE_RIP();
1490 IEM_MC_END();
1491 }
1492 else
1493 {
1494 /*
1495 * Register, memory.
1496 */
1497 if (pVCpu->iem.s.uVexLength == 0)
1498 {
1499 IEM_MC_BEGIN(0, 2);
1500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1501 IEM_MC_LOCAL(RTUINT128U, uSrc);
1502
1503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1504 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1505 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1506 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1507
1508 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1509 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1510
1511 IEM_MC_ADVANCE_RIP();
1512 IEM_MC_END();
1513 }
1514 else
1515 {
1516 IEM_MC_BEGIN(0, 2);
1517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1518 IEM_MC_LOCAL(RTUINT256U, uSrc);
1519
1520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1521 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1522 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1523 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1524
1525 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1526 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1527
1528 IEM_MC_ADVANCE_RIP();
1529 IEM_MC_END();
1530 }
1531 }
1532 return VINF_SUCCESS;
1533}
1534
1535
1536/**
1537 * @opmnemonic udvexf30f29
1538 * @opcode 0x29
1539 * @oppfx 0xf3
1540 * @opunused vex.modrm
1541 * @opcpuid avx
1542 * @optest ->
1543 * @opdone
1544 */
1545
1546/**
1547 * @opmnemonic udvexf20f29
1548 * @opcode 0x29
1549 * @oppfx 0xf2
1550 * @opunused vex.modrm
1551 * @opcpuid avx
1552 * @optest ->
1553 * @opdone
1554 */
1555
1556
1557/** Opcode VEX.0F 0x2a - invalid */
1558/** Opcode VEX.66.0F 0x2a - invalid */
1559/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
1560FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
1561/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
1562FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
1563
1564
1565/**
1566 * @opcode 0x2b
1567 * @opcodesub !11 mr/reg
1568 * @oppfx none
1569 * @opcpuid avx
1570 * @opgroup og_avx_cachect
1571 * @opxcpttype 1
1572 * @optest op1=1 op2=2 -> op1=2
1573 * @optest op1=0 op2=-42 -> op1=-42
1574 * @note Identical implementation to vmovntpd
1575 */
1576FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
1577{
1578 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1579 Assert(pVCpu->iem.s.uVexLength <= 1);
1580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1581 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1582 {
1583 /*
1584 * memory, register.
1585 */
1586 if (pVCpu->iem.s.uVexLength == 0)
1587 {
1588 IEM_MC_BEGIN(0, 2);
1589 IEM_MC_LOCAL(RTUINT128U, uSrc);
1590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1591
1592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1593 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1594 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1595 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1596
1597 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1598 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1599
1600 IEM_MC_ADVANCE_RIP();
1601 IEM_MC_END();
1602 }
1603 else
1604 {
1605 IEM_MC_BEGIN(0, 2);
1606 IEM_MC_LOCAL(RTUINT256U, uSrc);
1607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1608
1609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1610 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1611 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1612 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1613
1614 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1615 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1616
1617 IEM_MC_ADVANCE_RIP();
1618 IEM_MC_END();
1619 }
1620 }
1621 /* The register, register encoding is invalid. */
1622 else
1623 return IEMOP_RAISE_INVALID_OPCODE();
1624 return VINF_SUCCESS;
1625}
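/*
 * Note (sketch): the non-temporal hint of vmovntps has no architectural effect
 * to model here, so the emulation above amounts to a plain aligned store; the
 * register-to-register encoding is rejected with #UD as noted above.
 */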
1626
1627/**
1628 * @opcode 0x2b
1629 * @opcodesub !11 mr/reg
1630 * @oppfx 0x66
1631 * @opcpuid avx
1632 * @opgroup og_avx_cachect
1633 * @opxcpttype 1
1634 * @optest op1=1 op2=2 -> op1=2
1635 * @optest op1=0 op2=-42 -> op1=-42
1636 * @note Identical implementation to vmovntps
1637 */
1638FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
1639{
1640 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1641 Assert(pVCpu->iem.s.uVexLength <= 1);
1642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1643 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1644 {
1645 /*
1646 * memory, register.
1647 */
1648 if (pVCpu->iem.s.uVexLength == 0)
1649 {
1650 IEM_MC_BEGIN(0, 2);
1651 IEM_MC_LOCAL(RTUINT128U, uSrc);
1652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1653
1654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1655 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1656 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1657 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1658
1659 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1660 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1661
1662 IEM_MC_ADVANCE_RIP();
1663 IEM_MC_END();
1664 }
1665 else
1666 {
1667 IEM_MC_BEGIN(0, 2);
1668 IEM_MC_LOCAL(RTUINT256U, uSrc);
1669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1670
1671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1672 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1673 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1674 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1675
1676 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1677 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1678
1679 IEM_MC_ADVANCE_RIP();
1680 IEM_MC_END();
1681 }
1682 }
1683 /* The register, register encoding is invalid. */
1684 else
1685 return IEMOP_RAISE_INVALID_OPCODE();
1686 return VINF_SUCCESS;
1687}
1688
1689/**
1690 * @opmnemonic udvexf30f2b
1691 * @opcode 0x2b
1692 * @oppfx 0xf3
1693 * @opunused vex.modrm
1694 * @opcpuid avx
1695 * @optest ->
1696 * @opdone
1697 */
1698
1699/**
1700 * @opmnemonic udvexf20f2b
1701 * @opcode 0x2b
1702 * @oppfx 0xf2
1703 * @opunused vex.modrm
1704 * @opcpuid avx
1705 * @optest ->
1706 * @opdone
1707 */
1708
1709
1710/* Opcode VEX.0F 0x2c - invalid */
1711/* Opcode VEX.66.0F 0x2c - invalid */
1712/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
1713FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
1714/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
1715FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1716
1717/* Opcode VEX.0F 0x2d - invalid */
1718/* Opcode VEX.66.0F 0x2d - invalid */
1719/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
1720FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1721/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
1722FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1723
1724/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
1725FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss);
1726/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
1727FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd);
1728/* Opcode VEX.F3.0F 0x2e - invalid */
1729/* Opcode VEX.F2.0F 0x2e - invalid */
1730
1731/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
1732FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1733/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
1734FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1735/* Opcode VEX.F3.0F 0x2f - invalid */
1736/* Opcode VEX.F2.0F 0x2f - invalid */
1737
1738/* Opcode VEX.0F 0x30 - invalid */
1739/* Opcode VEX.0F 0x31 - invalid */
1740/* Opcode VEX.0F 0x32 - invalid */
1741/* Opcode VEX.0F 0x33 - invalid */
1742/* Opcode VEX.0F 0x34 - invalid */
1743/* Opcode VEX.0F 0x35 - invalid */
1744/* Opcode VEX.0F 0x36 - invalid */
1745/* Opcode VEX.0F 0x37 - invalid */
1746/* Opcode VEX.0F 0x38 - invalid */
1747/* Opcode VEX.0F 0x39 - invalid */
1748/* Opcode VEX.0F 0x3a - invalid */
1749/* Opcode VEX.0F 0x3b - invalid */
1750/* Opcode VEX.0F 0x3c - invalid */
1751/* Opcode VEX.0F 0x3d - invalid */
1752/* Opcode VEX.0F 0x3e - invalid */
1753/* Opcode VEX.0F 0x3f - invalid */
1754/* Opcode VEX.0F 0x40 - invalid */
1755/* Opcode VEX.0F 0x41 - invalid */
1756/* Opcode VEX.0F 0x42 - invalid */
1757/* Opcode VEX.0F 0x43 - invalid */
1758/* Opcode VEX.0F 0x44 - invalid */
1759/* Opcode VEX.0F 0x45 - invalid */
1760/* Opcode VEX.0F 0x46 - invalid */
1761/* Opcode VEX.0F 0x47 - invalid */
1762/* Opcode VEX.0F 0x48 - invalid */
1763/* Opcode VEX.0F 0x49 - invalid */
1764/* Opcode VEX.0F 0x4a - invalid */
1765/* Opcode VEX.0F 0x4b - invalid */
1766/* Opcode VEX.0F 0x4c - invalid */
1767/* Opcode VEX.0F 0x4d - invalid */
1768/* Opcode VEX.0F 0x4e - invalid */
1769/* Opcode VEX.0F 0x4f - invalid */
1770
1771/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
1772FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1773/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
1774FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1775/* Opcode VEX.F3.0F 0x50 - invalid */
1776/* Opcode VEX.F2.0F 0x50 - invalid */
1777
1778/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
1779FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1780/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
1781FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1782/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
1783FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1784/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1785FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1786
1787/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
1788FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1789/* Opcode VEX.66.0F 0x52 - invalid */
1790/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
1791FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1792/* Opcode VEX.F2.0F 0x52 - invalid */
1793
1794/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
1795FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1796/* Opcode VEX.66.0F 0x53 - invalid */
1797/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
1798FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1799/* Opcode VEX.F2.0F 0x53 - invalid */
1800
1801
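/*
 * Note: The VEX.0F 0x54..0x57 logical instructions below reuse the integer
 * vpand/vpandn/vpor/vpxor A-impl workers.  This is safe because a bitwise
 * operation produces the same bits whether the 128/256-bit operands are
 * interpreted as packed singles, packed doubles or packed integers; only
 * the assembler mnemonics and operand notation differ.
 */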
1802/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
1803FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
1804{
1805 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1806 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1807 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
1808}
1809
1810
1811/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
1812FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
1813{
1814 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
1815 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1816 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
1817}
1818
1819
1820/* Opcode VEX.F3.0F 0x54 - invalid */
1821/* Opcode VEX.F2.0F 0x54 - invalid */
1822
1823
1824/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
1825FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
1826{
1827 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1828 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1829 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
1830}
1831
1832
1833/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
1834FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
1835{
1836 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
1837 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1838 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
1839}
1840
1841
1842/* Opcode VEX.F3.0F 0x55 - invalid */
1843/* Opcode VEX.F2.0F 0x55 - invalid */
1844
1845/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
1846FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
1847{
1848 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1849 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1850 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
1851}
1852
1853
1854/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
1855FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
1856{
1857 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
1858 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1859 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
1860}
1861
1862
1863/* Opcode VEX.F3.0F 0x56 - invalid */
1864/* Opcode VEX.F2.0F 0x56 - invalid */
1865
1866
1867/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
1868FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
1869{
1870 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
1871 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1872 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
1873}
1874
1875
1876/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
1877FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
1878{
1879 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
1880 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
1881 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
1882}
1883
1884
1885/* Opcode VEX.F3.0F 0x57 - invalid */
1886/* Opcode VEX.F2.0F 0x57 - invalid */
1887
1888/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
1889FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
1890/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
1891FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
1892/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
1893FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
1894/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
1895FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
1896
1897/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
1898FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
1899/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
1900FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
1901/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
1902FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
1903/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
1904FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
1905
1906/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
1907FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
1908/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
1909FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
1910/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
1911FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
1912/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
1913FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
1914
1915/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
1916FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
1917/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
1918FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
1919/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
1920FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
1921/* Opcode VEX.F2.0F 0x5b - invalid */
1922
1923/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
1924FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
1925/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
1926FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
1927/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
1928FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
1929/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
1930FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
1931
1932/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
1933FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
1934/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
1935FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
1936/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
1937FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
1938/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
1939FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
1940
1941/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
1942FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
1943/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
1944FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
1945/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
1946FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
1947/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
1948FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
1949
1950/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
1951FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
1952/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
1953FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
1954/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
1955FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
1956/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
1957FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
1958
1959
1960///**
1961// * Common worker for SSE2 instructions on the forms:
1962// * pxxxx xmm1, xmm2/mem128
1963// *
1964// * The 2nd operand is the first half of a register, which in the memory case
1965// * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or 128-bit
1966// * memory access for SSE.
1967// *
1968// * Exceptions type 4.
1969// */
1970//FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
1971//{
1972// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1973// if (!pImpl->pfnU64)
1974// return IEMOP_RAISE_INVALID_OPCODE();
1975// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1976// {
1977// /*
1978// * Register, register.
1979// */
1980// /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
1981// /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
1982// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1983// IEM_MC_BEGIN(2, 0);
1984// IEM_MC_ARG(uint64_t *, pDst, 0);
1985// IEM_MC_ARG(uint32_t const *, pSrc, 1);
1986// IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
1987// IEM_MC_PREPARE_FPU_USAGE();
1988// IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
1989// IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
1990// IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
1991// IEM_MC_ADVANCE_RIP();
1992// IEM_MC_END();
1993// }
1994// else
1995// {
1996// /*
1997// * Register, memory.
1998// */
1999// IEM_MC_BEGIN(2, 2);
2000// IEM_MC_ARG(uint64_t *, pDst, 0);
2001// IEM_MC_LOCAL(uint32_t, uSrc);
2002// IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2003// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2004//
2005// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2006// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2007// IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2008// IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2009//
2010// IEM_MC_PREPARE_FPU_USAGE();
2011// IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2012// IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2013//
2014// IEM_MC_ADVANCE_RIP();
2015// IEM_MC_END();
2016// }
2017// return VINF_SUCCESS;
2018//}
2019
2020
2021/* Opcode VEX.0F 0x60 - invalid */
2022
2023/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2024FNIEMOP_STUB(iemOp_vpunpcklbw_Vx_Hx_Wx);
2025//FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2026//{
2027// IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
2028// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2029//}
2030
2031/* Opcode VEX.F3.0F 0x60 - invalid */
2032
2033
2034/* Opcode VEX.0F 0x61 - invalid */
2035
2036/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2037FNIEMOP_STUB(iemOp_vpunpcklwd_Vx_Hx_Wx);
2038//FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2039//{
2040// IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2041// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2042//}
2043
2044/* Opcode VEX.F3.0F 0x61 - invalid */
2045
2046
2047/* Opcode VEX.0F 0x62 - invalid */
2048
2049/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2050FNIEMOP_STUB(iemOp_vpunpckldq_Vx_Hx_Wx);
2051//FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2052//{
2053// IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2054// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2055//}
2056
2057/* Opcode VEX.F3.0F 0x62 - invalid */
2058
2059
2060
2061/* Opcode VEX.0F 0x63 - invalid */
2062/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2063FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
2064/* Opcode VEX.F3.0F 0x63 - invalid */
2065
2066/* Opcode VEX.0F 0x64 - invalid */
2067/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2068FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
2069/* Opcode VEX.F3.0F 0x64 - invalid */
2070
2071/* Opcode VEX.0F 0x65 - invalid */
2072/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2073FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
2074/* Opcode VEX.F3.0F 0x65 - invalid */
2075
2076/* Opcode VEX.0F 0x66 - invalid */
2077/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
2078FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
2079/* Opcode VEX.F3.0F 0x66 - invalid */
2080
2081/* Opcode VEX.0F 0x67 - invalid */
2082/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, W */
2083FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
2084/* Opcode VEX.F3.0F 0x67 - invalid */
2085
2086
2087///**
2088// * Common worker for SSE2 instructions on the form:
2089// * pxxxx xmm1, xmm2/mem128
2090// *
2091// * The 2nd operand is the second half of a register, which in the memory case
2092// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2093// * where it may read the full 128 bits or only the upper 64 bits.
2094// *
2095// * Exceptions type 4.
2096// */
2097//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2098//{
2099// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2100// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2101// {
2102// /*
2103// * Register, register.
2104// */
2105// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2106// IEM_MC_BEGIN(2, 0);
2107// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2108// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2109// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2110// IEM_MC_PREPARE_SSE_USAGE();
2111// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2112// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2113// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2114// IEM_MC_ADVANCE_RIP();
2115// IEM_MC_END();
2116// }
2117// else
2118// {
2119// /*
2120// * Register, memory.
2121// */
2122// IEM_MC_BEGIN(2, 2);
2123// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2124// IEM_MC_LOCAL(RTUINT128U, uSrc);
2125// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2126// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2127//
2128// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2129// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2130// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2131// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2132//
2133// IEM_MC_PREPARE_SSE_USAGE();
2134// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2135// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2136//
2137// IEM_MC_ADVANCE_RIP();
2138// IEM_MC_END();
2139// }
2140// return VINF_SUCCESS;
2141//}
2142
2143
2144/* Opcode VEX.0F 0x68 - invalid */
2145
2146/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
2147FNIEMOP_STUB(iemOp_vpunpckhbw_Vx_Hx_Wx);
2148//FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
2149//{
2150// IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
2151// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2152//}
2153/* Opcode VEX.F3.0F 0x68 - invalid */
2154
2155
2156/* Opcode VEX.0F 0x69 - invalid */
2157
2158/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
2159FNIEMOP_STUB(iemOp_vpunpckhwd_Vx_Hx_Wx);
2160//FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
2161//{
2162// IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
2163// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2164//
2165//}
2166/* Opcode VEX.F3.0F 0x69 - invalid */
2167
2168
2169/* Opcode VEX.0F 0x6a - invalid */
2170
2171/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, W */
2172FNIEMOP_STUB(iemOp_vpunpckhdq_Vx_Hx_W);
2173//FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
2174//{
2175// IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
2176// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2177//}
2178/* Opcode VEX.F3.0F 0x6a - invalid */
2179
2180
2181/* Opcode VEX.0F 0x6b - invalid */
2182/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
2183FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
2184/* Opcode VEX.F3.0F 0x6b - invalid */
2185
2186
2187/* Opcode VEX.0F 0x6c - invalid */
2188
2189/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
2190FNIEMOP_STUB(iemOp_vpunpcklqdq_Vx_Hx_Wx);
2191//FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
2192//{
2193// IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
2194// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2195//}
2196
2197/* Opcode VEX.F3.0F 0x6c - invalid */
2198/* Opcode VEX.F2.0F 0x6c - invalid */
2199
2200
2201/* Opcode VEX.0F 0x6d - invalid */
2202
2203/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, W */
2204FNIEMOP_STUB(iemOp_vpunpckhqdq_Vx_Hx_W);
2205//FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
2206//{
2207// IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
2208// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2209//}
2210
2211/* Opcode VEX.F3.0F 0x6d - invalid */
2212
2213
2214/* Opcode VEX.0F 0x6e - invalid */
2215
2216FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
2217{
2218 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2219 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2220 {
2221 /**
2222 * @opcode 0x6e
2223 * @opcodesub rex.w=1
2224 * @oppfx 0x66
2225 * @opcpuid avx
2226 * @opgroup og_avx_simdint_datamov
2227 * @opxcpttype 5
2228 * @optest 64-bit / op1=1 op2=2 -> op1=2
2229 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
2230 */
2231 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2232 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2233 {
2234 /* XMM, greg64 */
2235 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2236 IEM_MC_BEGIN(0, 1);
2237 IEM_MC_LOCAL(uint64_t, u64Tmp);
2238
2239 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2240 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2241
2242 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2243 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2244
2245 IEM_MC_ADVANCE_RIP();
2246 IEM_MC_END();
2247 }
2248 else
2249 {
2250 /* XMM, [mem64] */
2251 IEM_MC_BEGIN(0, 2);
2252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2253 IEM_MC_LOCAL(uint64_t, u64Tmp);
2254
2255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2256 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2257 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2258 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2259
2260 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2261 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2262
2263 IEM_MC_ADVANCE_RIP();
2264 IEM_MC_END();
2265 }
2266 }
2267 else
2268 {
2269 /**
2270 * @opdone
2271 * @opcode 0x6e
2272 * @opcodesub rex.w=0
2273 * @oppfx 0x66
2274 * @opcpuid avx
2275 * @opgroup og_avx_simdint_datamov
2276 * @opxcpttype 5
2277 * @opfunction iemOp_vmovd_q_Vy_Ey
2278 * @optest op1=1 op2=2 -> op1=2
2279 * @optest op1=0 op2=-42 -> op1=-42
2280 */
2281 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2282 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2283 {
2284 /* XMM, greg32 */
2285 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2286 IEM_MC_BEGIN(0, 1);
2287 IEM_MC_LOCAL(uint32_t, u32Tmp);
2288
2289 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2290 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2291
2292 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2293 IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2294
2295 IEM_MC_ADVANCE_RIP();
2296 IEM_MC_END();
2297 }
2298 else
2299 {
2300 /* XMM, [mem32] */
2301 IEM_MC_BEGIN(0, 2);
2302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2303 IEM_MC_LOCAL(uint32_t, u32Tmp);
2304
2305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2306 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2307 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2308 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2309
2310 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2311 IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2312
2313 IEM_MC_ADVANCE_RIP();
2314 IEM_MC_END();
2315 }
2316 }
2317 return VINF_SUCCESS;
2318}
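/*
 * Note: The IEM_MC_STORE_YREG_U64/U32_ZX_VLMAX operations above implement the
 * VEX convention that a VEX-encoded write to an XMM register zeroes all the
 * destination bits above the written element up to the maximum vector length,
 * unlike the legacy SSE forms which leave the upper YMM bits untouched.
 */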
2319
2320
2321/* Opcode VEX.F3.0F 0x6e - invalid */
2322
2323
2324/* Opcode VEX.0F 0x6f - invalid */
2325
2326/**
2327 * @opcode 0x6f
2328 * @oppfx 0x66
2329 * @opcpuid avx
2330 * @opgroup og_avx_simdint_datamove
2331 * @opxcpttype 1
2332 * @optest op1=1 op2=2 -> op1=2
2333 * @optest op1=0 op2=-42 -> op1=-42
2334 */
2335FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
2336{
2337 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2338 Assert(pVCpu->iem.s.uVexLength <= 1);
2339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2340 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2341 {
2342 /*
2343 * Register, register.
2344 */
2345 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2346 IEM_MC_BEGIN(0, 0);
2347
2348 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2349 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2350 if (pVCpu->iem.s.uVexLength == 0)
2351 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2352 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2353 else
2354 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2355 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2356 IEM_MC_ADVANCE_RIP();
2357 IEM_MC_END();
2358 }
2359 else if (pVCpu->iem.s.uVexLength == 0)
2360 {
2361 /*
2362 * Register, memory128.
2363 */
2364 IEM_MC_BEGIN(0, 2);
2365 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2367
2368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2369 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2370 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2371 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2372
2373 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2374 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2375
2376 IEM_MC_ADVANCE_RIP();
2377 IEM_MC_END();
2378 }
2379 else
2380 {
2381 /*
2382 * Register, memory256.
2383 */
2384 IEM_MC_BEGIN(0, 2);
2385 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
2386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2387
2388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2389 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2390 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2391 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2392
2393 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2394 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u256Tmp);
2395
2396 IEM_MC_ADVANCE_RIP();
2397 IEM_MC_END();
2398 }
2399 return VINF_SUCCESS;
2400}
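/*
 * Note: vmovdqa above uses the *_ALIGN_SSE / *_ALIGN_AVX memory fetchers, so a
 * source operand that is not 16/32 byte aligned raises \#GP(0), whereas the
 * vmovdqu variant below uses the plain fetchers and accepts any alignment.
 */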
2401
2402/**
2403 * @opcode 0x6f
2404 * @oppfx 0xf3
2405 * @opcpuid avx
2406 * @opgroup og_avx_simdint_datamove
2407 * @opxcpttype 4UA
2408 * @optest op1=1 op2=2 -> op1=2
2409 * @optest op1=0 op2=-42 -> op1=-42
2410 */
2411FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
2412{
2413 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2414 Assert(pVCpu->iem.s.uVexLength <= 1);
2415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2416 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2417 {
2418 /*
2419 * Register, register.
2420 */
2421 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2422 IEM_MC_BEGIN(0, 0);
2423
2424 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2425 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2426 if (pVCpu->iem.s.uVexLength == 0)
2427 IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2428 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2429 else
2430 IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2431 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2432 IEM_MC_ADVANCE_RIP();
2433 IEM_MC_END();
2434 }
2435 else if (pVCpu->iem.s.uVexLength == 0)
2436 {
2437 /*
2438 * Register, memory128.
2439 */
2440 IEM_MC_BEGIN(0, 2);
2441 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2443
2444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2445 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2446 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2447 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2448
2449 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2450 IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2451
2452 IEM_MC_ADVANCE_RIP();
2453 IEM_MC_END();
2454 }
2455 else
2456 {
2457 /*
2458 * Register, memory256.
2459 */
2460 IEM_MC_BEGIN(0, 2);
2461 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
2462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2463
2464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2465 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2466 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2467 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2468
2469 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2470 IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u256Tmp);
2471
2472 IEM_MC_ADVANCE_RIP();
2473 IEM_MC_END();
2474 }
2475 return VINF_SUCCESS;
2476}
2477
2478
2479/* Opcode VEX.0F 0x70 - invalid */
2480
2481/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
2482FNIEMOP_STUB(iemOp_vpshufd_Vx_Wx_Ib);
2483//FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
2484//{
2485// IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
2486// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2487// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2488// {
2489// /*
2490// * Register, register.
2491// */
2492// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2493// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2494//
2495// IEM_MC_BEGIN(3, 0);
2496// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2497// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2498// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2499// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2500// IEM_MC_PREPARE_SSE_USAGE();
2501// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2502// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2503// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2504// IEM_MC_ADVANCE_RIP();
2505// IEM_MC_END();
2506// }
2507// else
2508// {
2509// /*
2510// * Register, memory.
2511// */
2512// IEM_MC_BEGIN(3, 2);
2513// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2514// IEM_MC_LOCAL(RTUINT128U, uSrc);
2515// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2516// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2517//
2518// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2519// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2520// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2521// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2522// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2523//
2524// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2525// IEM_MC_PREPARE_SSE_USAGE();
2526// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2527// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
2528//
2529// IEM_MC_ADVANCE_RIP();
2530// IEM_MC_END();
2531// }
2532// return VINF_SUCCESS;
2533//}
2534
2535/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
2536FNIEMOP_STUB(iemOp_vpshufhw_Vx_Wx_Ib);
2537//FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
2538//{
2539// IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
2540// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2541// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2542// {
2543// /*
2544// * Register, register.
2545// */
2546// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2547// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2548//
2549// IEM_MC_BEGIN(3, 0);
2550// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2551// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2552// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2553// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2554// IEM_MC_PREPARE_SSE_USAGE();
2555// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2556// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2557// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2558// IEM_MC_ADVANCE_RIP();
2559// IEM_MC_END();
2560// }
2561// else
2562// {
2563// /*
2564// * Register, memory.
2565// */
2566// IEM_MC_BEGIN(3, 2);
2567// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2568// IEM_MC_LOCAL(RTUINT128U, uSrc);
2569// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2570// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2571//
2572// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2573// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2574// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2575// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2576// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2577//
2578// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2579// IEM_MC_PREPARE_SSE_USAGE();
2580// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2581// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
2582//
2583// IEM_MC_ADVANCE_RIP();
2584// IEM_MC_END();
2585// }
2586// return VINF_SUCCESS;
2587//}
2588
2589/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
2590FNIEMOP_STUB(iemOp_vpshuflw_Vx_Wx_Ib);
2591//FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
2592//{
2593// IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
2594// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2595// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2596// {
2597// /*
2598// * Register, register.
2599// */
2600// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2601// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2602//
2603// IEM_MC_BEGIN(3, 0);
2604// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2605// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2606// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2607// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2608// IEM_MC_PREPARE_SSE_USAGE();
2609// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2610// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2611// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2612// IEM_MC_ADVANCE_RIP();
2613// IEM_MC_END();
2614// }
2615// else
2616// {
2617// /*
2618// * Register, memory.
2619// */
2620// IEM_MC_BEGIN(3, 2);
2621// IEM_MC_ARG(PRTUINT128U, pDst, 0);
2622// IEM_MC_LOCAL(RTUINT128U, uSrc);
2623// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
2624// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2625//
2626// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2627// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2628// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2629// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2630// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2631//
2632// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2633// IEM_MC_PREPARE_SSE_USAGE();
2634// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2635// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
2636//
2637// IEM_MC_ADVANCE_RIP();
2638// IEM_MC_END();
2639// }
2640// return VINF_SUCCESS;
2641//}
2642
2643
2644/* Opcode VEX.0F 0x71 11/2 - invalid. */
2645/** Opcode VEX.66.0F 0x71 11/2. */
2646FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
2647
2648/* Opcode VEX.0F 0x71 11/4 - invalid */
2649/** Opcode VEX.66.0F 0x71 11/4. */
2650FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
2651
2652/* Opcode VEX.0F 0x71 11/6 - invalid */
2653/** Opcode VEX.66.0F 0x71 11/6. */
2654FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
2655
2656
2657/**
2658 * VEX Group 12 jump table for register variant.
2659 */
2660IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
2661{
2662 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2663 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2664 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2665 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2666 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2667 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2668 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2669 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2670};
2671AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
2672
2673
2674/** Opcode VEX.0F 0x71. */
2675FNIEMOP_DEF(iemOp_VGrp12)
2676{
2677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2678 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2679 /* register, register */
2680 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2681 + pVCpu->iem.s.idxPrefix], bRm);
2682 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2683}
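/*
 * Note: The VEX group 12/13/14 register tables are indexed by
 * ModRM.reg * 4 + idxPrefix, with the prefix columns ordered none, 066h,
 * 0f3h, 0f2h (cf. the group 15 table further down).  For instance, decoding
 *      vpsrlw xmm2, xmm1, 3            ; VEX.66.0F 71 /2 ib
 * gives reg=2 and idxPrefix=1 (066h), selecting entry 2*4 + 1, i.e.
 * iemOp_VGrp12_vpsrlw_Hx_Ux_Ib.
 */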
2684
2685
2686/* Opcode VEX.0F 0x72 11/2 - invalid. */
2687/** Opcode VEX.66.0F 0x72 11/2. */
2688FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
2689
2690/* Opcode VEX.0F 0x72 11/4 - invalid. */
2691/** Opcode VEX.66.0F 0x72 11/4. */
2692FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
2693
2694/* Opcode VEX.0F 0x72 11/6 - invalid. */
2695/** Opcode VEX.66.0F 0x72 11/6. */
2696FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
2697
2698
2699/**
2700 * VEX Group 13 jump table for register variant.
2701 */
2702IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
2703{
2704 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2705 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2706 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2707 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2708 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2709 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2710 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2711 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
2712};
2713AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
2714
2715/** Opcode VEX.0F 0x72. */
2716FNIEMOP_DEF(iemOp_VGrp13)
2717{
2718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2719 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2720 /* register, register */
2721 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2722 + pVCpu->iem.s.idxPrefix], bRm);
2723 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2724}
2725
2726
2727/* Opcode VEX.0F 0x73 11/2 - invalid. */
2728/** Opcode VEX.66.0F 0x73 11/2. */
2729FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
2730
2731/** Opcode VEX.66.0F 0x73 11/3. */
2732FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
2733
2734/* Opcode VEX.0F 0x73 11/6 - invalid. */
2735/** Opcode VEX.66.0F 0x73 11/6. */
2736FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
2737
2738/** Opcode VEX.66.0F 0x73 11/7. */
2739FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
2740
2741/**
2742 * VEX Group 14 jump table for register variant.
2743 */
2744IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
2745{
2746 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2747 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2748 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2749 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2750 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2751 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
2752 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2753 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
2754};
2755AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
2756
2757
2758/** Opcode VEX.0F 0x73. */
2759FNIEMOP_DEF(iemOp_VGrp14)
2760{
2761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2763 /* register, register */
2764 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2765 + pVCpu->iem.s.idxPrefix], bRm);
2766 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
2767}
2768
2769
2770/* Opcode VEX.0F 0x74 - invalid */
2771
2772/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
2773FNIEMOP_STUB(iemOp_vpcmpeqb_Vx_Hx_Wx);
2774//FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
2775//{
2776// IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
2777// return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
2778//}
2779
2780/* Opcode VEX.F3.0F 0x74 - invalid */
2781/* Opcode VEX.F2.0F 0x74 - invalid */
2782
2783
2784/* Opcode VEX.0F 0x75 - invalid */
2785
2786/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
2787FNIEMOP_STUB(iemOp_vpcmpeqw_Vx_Hx_Wx);
2788//FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
2789//{
2790// IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
2791// return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
2792//}
2793
2794/* Opcode VEX.F3.0F 0x75 - invalid */
2795/* Opcode VEX.F2.0F 0x75 - invalid */
2796
2797
2798/* Opcode VEX.0F 0x76 - invalid */
2799
2800/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
2801FNIEMOP_STUB(iemOp_vpcmpeqd_Vx_Hx_Wx);
2802//FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
2803//{
2804// IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
2805// return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
2806//}
2807
2808/* Opcode VEX.F3.0F 0x76 - invalid */
2809/* Opcode VEX.F2.0F 0x76 - invalid */
2810
2811
2812/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
2813FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
2814/* Opcode VEX.66.0F 0x77 - invalid */
2815/* Opcode VEX.F3.0F 0x77 - invalid */
2816/* Opcode VEX.F2.0F 0x77 - invalid */
2817
2818/* Opcode VEX.0F 0x78 - invalid */
2819/* Opcode VEX.66.0F 0x78 - invalid */
2820/* Opcode VEX.F3.0F 0x78 - invalid */
2821/* Opcode VEX.F2.0F 0x78 - invalid */
2822
2823/* Opcode VEX.0F 0x79 - invalid */
2824/* Opcode VEX.66.0F 0x79 - invalid */
2825/* Opcode VEX.F3.0F 0x79 - invalid */
2826/* Opcode VEX.F2.0F 0x79 - invalid */
2827
2828/* Opcode VEX.0F 0x7a - invalid */
2829/* Opcode VEX.66.0F 0x7a - invalid */
2830/* Opcode VEX.F3.0F 0x7a - invalid */
2831/* Opcode VEX.F2.0F 0x7a - invalid */
2832
2833/* Opcode VEX.0F 0x7b - invalid */
2834/* Opcode VEX.66.0F 0x7b - invalid */
2835/* Opcode VEX.F3.0F 0x7b - invalid */
2836/* Opcode VEX.F2.0F 0x7b - invalid */
2837
2838/* Opcode VEX.0F 0x7c - invalid */
2839/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
2840FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
2841/* Opcode VEX.F3.0F 0x7c - invalid */
2842/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
2843FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
2844
2845/* Opcode VEX.0F 0x7d - invalid */
2846/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
2847FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
2848/* Opcode VEX.F3.0F 0x7d - invalid */
2849/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
2850FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
2851
2852
2853/* Opcode VEX.0F 0x7e - invalid */
2854
2855FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
2856{
2857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2858 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2859 {
2860 /**
2861 * @opcode 0x7e
2862 * @opcodesub rex.w=1
2863 * @oppfx 0x66
2864 * @opcpuid avx
2865 * @opgroup og_avx_simdint_datamov
2866 * @opxcpttype 5
2867 * @optest 64-bit / op1=1 op2=2 -> op1=2
2868 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
2869 */
2870 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2871 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2872 {
2873 /* greg64, XMM */
2874 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2875 IEM_MC_BEGIN(0, 1);
2876 IEM_MC_LOCAL(uint64_t, u64Tmp);
2877
2878 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2879 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2880
2881 IEM_MC_FETCH_YREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2882 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
2883
2884 IEM_MC_ADVANCE_RIP();
2885 IEM_MC_END();
2886 }
2887 else
2888 {
2889 /* [mem64], XMM */
2890 IEM_MC_BEGIN(0, 2);
2891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2892 IEM_MC_LOCAL(uint64_t, u64Tmp);
2893
2894 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2895 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2896 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2897 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2898
2899 IEM_MC_FETCH_YREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2900 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
2901
2902 IEM_MC_ADVANCE_RIP();
2903 IEM_MC_END();
2904 }
2905 }
2906 else
2907 {
2908 /**
2909 * @opdone
2910 * @opcode 0x7e
2911 * @opcodesub rex.w=0
2912 * @oppfx 0x66
2913 * @opcpuid avx
2914 * @opgroup og_avx_simdint_datamov
2915 * @opxcpttype 5
2916 * @opfunction iemOp_vmovd_q_Ey_Vy
2917 * @optest op1=1 op2=2 -> op1=2
2918 * @optest op1=0 op2=-42 -> op1=-42
2919 */
2920 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
2921 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2922 {
2923 /* greg32, XMM */
2924 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2925 IEM_MC_BEGIN(0, 1);
2926 IEM_MC_LOCAL(uint32_t, u32Tmp);
2927
2928 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2929 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2930
2931 IEM_MC_FETCH_YREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2932 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
2933
2934 IEM_MC_ADVANCE_RIP();
2935 IEM_MC_END();
2936 }
2937 else
2938 {
2939 /* [mem32], XMM */
2940 IEM_MC_BEGIN(0, 2);
2941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2942 IEM_MC_LOCAL(uint32_t, u32Tmp);
2943
2944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2945 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2946 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2947 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2948
2949 IEM_MC_FETCH_YREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2950 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
2951
2952 IEM_MC_ADVANCE_RIP();
2953 IEM_MC_END();
2954 }
2955 }
2956 return VINF_SUCCESS;
2957}
2958
2959/**
2960 * @opcode 0x7e
2961 * @oppfx 0xf3
2962 * @opcpuid avx
2963 * @opgroup og_avx_pcksclr_datamove
2964 * @opxcpttype none
2965 * @optest op1=1 op2=2 -> op1=2
2966 * @optest op1=0 op2=-42 -> op1=-42
2967 */
2968FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
2969{
2970 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2973 {
2974 /*
2975 * Register, register.
2976 */
2977 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2978 IEM_MC_BEGIN(0, 0);
2979
2980 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2981 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2982
2983 IEM_MC_COPY_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2984 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2985 IEM_MC_ADVANCE_RIP();
2986 IEM_MC_END();
2987 }
2988 else
2989 {
2990 /*
2991 * Memory, register.
2992 */
2993 IEM_MC_BEGIN(0, 2);
2994 IEM_MC_LOCAL(uint64_t, uSrc);
2995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2996
2997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2998 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2999 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3000 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3001
3002 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3003 IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
3004
3005 IEM_MC_ADVANCE_RIP();
3006 IEM_MC_END();
3007 }
3008 return VINF_SUCCESS;
3009
3010}
3011/* Opcode VEX.F2.0F 0x7e - invalid */
3012
3013
3014/* Opcode VEX.0F 0x7f - invalid */
3015
3016/**
3017 * @opcode 0x7f
3018 * @oppfx 0x66
3019 * @opcpuid avx
3020 * @opgroup og_avx_simdint_datamove
3021 * @opxcpttype 1
3022 * @optest op1=1 op2=2 -> op1=2
3023 * @optest op1=0 op2=-42 -> op1=-42
3024 */
3025FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3026{
3027 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3028 Assert(pVCpu->iem.s.uVexLength <= 1);
3029 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3030 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3031 {
3032 /*
3033 * Register, register.
3034 */
3035 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3036 IEM_MC_BEGIN(0, 0);
3037
3038 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3039 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3040 if (pVCpu->iem.s.uVexLength == 0)
3041 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3042 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3043 else
3044 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3045 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3046 IEM_MC_ADVANCE_RIP();
3047 IEM_MC_END();
3048 }
3049 else if (pVCpu->iem.s.uVexLength == 0)
3050 {
3051 /*
3052 * Register, memory128.
3053 */
3054 IEM_MC_BEGIN(0, 2);
3055 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3057
3058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3059 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3060 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3061 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3062
3063 IEM_MC_FETCH_YREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3064 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3065
3066 IEM_MC_ADVANCE_RIP();
3067 IEM_MC_END();
3068 }
3069 else
3070 {
3071 /*
3072 * Register, memory256.
3073 */
3074 IEM_MC_BEGIN(0, 2);
3075 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3077
3078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3079 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3080 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3081 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3082
3083 IEM_MC_FETCH_YREG_U256(u256Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3084 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3085
3086 IEM_MC_ADVANCE_RIP();
3087 IEM_MC_END();
3088 }
3089 return VINF_SUCCESS;
3090}
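/*
 * Note: In the store direction the register-to-register form above uses
 * IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE because a YMM register is modified,
 * while the register-to-memory forms only need _FOR_READ since the guest
 * AVX state itself is left untouched and only guest memory is written.
 */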
3091
3092/**
3093 * @opcode 0x7f
3094 * @oppfx 0xf3
3095 * @opcpuid avx
3096 * @opgroup og_avx_simdint_datamove
3097 * @opxcpttype 4UA
3098 * @optest op1=1 op2=2 -> op1=2
3099 * @optest op1=0 op2=-42 -> op1=-42
3100 */
3101FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
3102{
3103 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3104 Assert(pVCpu->iem.s.uVexLength <= 1);
3105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3107 {
3108 /*
3109 * Register, register.
3110 */
3111 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3112 IEM_MC_BEGIN(0, 0);
3113
3114 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3115 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3116 if (pVCpu->iem.s.uVexLength == 0)
3117 IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3118 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3119 else
3120 IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3121 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3122 IEM_MC_ADVANCE_RIP();
3123 IEM_MC_END();
3124 }
3125 else if (pVCpu->iem.s.uVexLength == 0)
3126 {
3127 /*
3128 * Register, memory128.
3129 */
3130 IEM_MC_BEGIN(0, 2);
3131 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3132 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3133
3134 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3135 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3136 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3137 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3138
3139 IEM_MC_FETCH_YREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3140 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3141
3142 IEM_MC_ADVANCE_RIP();
3143 IEM_MC_END();
3144 }
3145 else
3146 {
3147 /*
3148 * Register, memory256.
3149 */
3150 IEM_MC_BEGIN(0, 2);
3151 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3153
3154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3155 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3156 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3157 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3158
3159 IEM_MC_FETCH_YREG_U256(u256Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3160 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
3161
3162 IEM_MC_ADVANCE_RIP();
3163 IEM_MC_END();
3164 }
3165 return VINF_SUCCESS;
3166}
3167
3168/* Opcode VEX.F2.0F 0x7f - invalid */
3169
3170
3171/* Opcode VEX.0F 0x80 - invalid */
3172/* Opcode VEX.0F 0x81 - invalid */
3173/* Opcode VEX.0F 0x82 - invalid */
3174/* Opcode VEX.0F 0x83 - invalid */
3175/* Opcode VEX.0F 0x84 - invalid */
3176/* Opcode VEX.0F 0x85 - invalid */
3177/* Opcode VEX.0F 0x86 - invalid */
3178/* Opcode VEX.0F 0x87 - invalid */
3179/* Opcode VEX.0F 0x88 - invalid */
3180/* Opcode VEX.0F 0x89 - invalid */
3181/* Opcode VEX.0F 0x8a - invalid */
3182/* Opcode VEX.0F 0x8b - invalid */
3183/* Opcode VEX.0F 0x8c - invalid */
3184/* Opcode VEX.0F 0x8d - invalid */
3185/* Opcode VEX.0F 0x8e - invalid */
3186/* Opcode VEX.0F 0x8f - invalid */
3187/* Opcode VEX.0F 0x90 - invalid */
3188/* Opcode VEX.0F 0x91 - invalid */
3189/* Opcode VEX.0F 0x92 - invalid */
3190/* Opcode VEX.0F 0x93 - invalid */
3191/* Opcode VEX.0F 0x94 - invalid */
3192/* Opcode VEX.0F 0x95 - invalid */
3193/* Opcode VEX.0F 0x96 - invalid */
3194/* Opcode VEX.0F 0x97 - invalid */
3195/* Opcode VEX.0F 0x98 - invalid */
3196/* Opcode VEX.0F 0x99 - invalid */
3197/* Opcode VEX.0F 0x9a - invalid */
3198/* Opcode VEX.0F 0x9b - invalid */
3199/* Opcode VEX.0F 0x9c - invalid */
3200/* Opcode VEX.0F 0x9d - invalid */
3201/* Opcode VEX.0F 0x9e - invalid */
3202/* Opcode VEX.0F 0x9f - invalid */
3203/* Opcode VEX.0F 0xa0 - invalid */
3204/* Opcode VEX.0F 0xa1 - invalid */
3205/* Opcode VEX.0F 0xa2 - invalid */
3206/* Opcode VEX.0F 0xa3 - invalid */
3207/* Opcode VEX.0F 0xa4 - invalid */
3208/* Opcode VEX.0F 0xa5 - invalid */
3209/* Opcode VEX.0F 0xa6 - invalid */
3210/* Opcode VEX.0F 0xa7 - invalid */
3211/* Opcode VEX.0F 0xa8 - invalid */
3212/* Opcode VEX.0F 0xa9 - invalid */
3213/* Opcode VEX.0F 0xaa - invalid */
3214/* Opcode VEX.0F 0xab - invalid */
3215/* Opcode VEX.0F 0xac - invalid */
3216/* Opcode VEX.0F 0xad - invalid */
3217
3218
3219/* Opcode VEX.0F 0xae mem/0 - invalid. */
3220/* Opcode VEX.0F 0xae mem/1 - invalid. */
3221
3222/**
3223 * @ opmaps grp15
3224 * @ opcode !11/2
3225 * @ oppfx none
3226 * @ opcpuid sse
3227 * @ opgroup og_sse_mxcsrsm
3228 * @ opxcpttype 5
3229 * @ optest op1=0 -> mxcsr=0
3230 * @ optest op1=0x2083 -> mxcsr=0x2083
3231 * @ optest op1=0xfffffffe -> value.xcpt=0xd
3232 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
3233 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
3234 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
3235 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
3236 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
3237 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3238 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3239 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3240 */
3241FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
3242//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
3243//{
3244// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3245// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
3246// return IEMOP_RAISE_INVALID_OPCODE();
3247//
3248// IEM_MC_BEGIN(2, 0);
3249// IEM_MC_ARG(uint8_t, iEffSeg, 0);
3250// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3251// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3252// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3253// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3254// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3255// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
3256// IEM_MC_END();
3257// return VINF_SUCCESS;
3258//}
3259
3260
3261/**
3262 * @opmaps vexgrp15
3263 * @opcode !11/3
3264 * @oppfx none
3265 * @opcpuid avx
3266 * @opgroup og_avx_mxcsrsm
3267 * @opxcpttype 5
3268 * @optest mxcsr=0 -> op1=0
3269 * @optest mxcsr=0x2083 -> op1=0x2083
3270 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
3271 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
3272 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
3273 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
3274 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
3275 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
3276 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
3277 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
3278 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
3279 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
3280 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
3281 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
3282 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
3283 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
3284 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
3285 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
3286 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
3287 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
3288 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
3289 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
3290 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
3291 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
3292 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
3293 * -> value.xcpt=0x6
3294 * @remarks AMD Jaguar CPU (f0x16,m0,s1) raises \#UD when CR0.EM is set. It also
3295 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
3296 * APMv4 rev 3.17 page 509.
3297 * @todo Test this instruction on AMD Ryzen.
3298 */
3299FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
3300{
3301 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3302 IEM_MC_BEGIN(2, 0);
3303 IEM_MC_ARG(uint8_t, iEffSeg, 0);
3304 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
3306 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3307 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3308 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
3309 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
3310 IEM_MC_END();
3311 return VINF_SUCCESS;
3312}
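/*
 * Usage note: vstmxcsr stores the 32-bit guest MXCSR to the memory operand,
 * e.g.
 *      vstmxcsr dword [rsp]            ; VEX.LZ.0F AE /3
 * A register operand (mod=11b) is undefined for this encoding and is routed
 * to iemOp_InvalidWithRM by the group 15 dispatcher below.
 */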
3313
3314/* Opcode VEX.0F 0xae mem/4 - invalid. */
3315/* Opcode VEX.0F 0xae mem/5 - invalid. */
3316/* Opcode VEX.0F 0xae mem/6 - invalid. */
3317/* Opcode VEX.0F 0xae mem/7 - invalid. */
3318
3319/* Opcode VEX.0F 0xae 11b/0 - invalid. */
3320/* Opcode VEX.0F 0xae 11b/1 - invalid. */
3321/* Opcode VEX.0F 0xae 11b/2 - invalid. */
3322/* Opcode VEX.0F 0xae 11b/3 - invalid. */
3323/* Opcode VEX.0F 0xae 11b/4 - invalid. */
3324/* Opcode VEX.0F 0xae 11b/5 - invalid. */
3325/* Opcode VEX.0F 0xae 11b/6 - invalid. */
3326/* Opcode VEX.0F 0xae 11b/7 - invalid. */
3327
3328/**
3329 * VEX Group 15 jump table for memory variant.
3330 */
3331IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
3332{ /* pfx: none, 066h, 0f3h, 0f2h */
3333 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3334 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3335 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3336 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3337 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3338 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3339 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3340 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
3341};
3342AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
3343
3344
3345/** Opcode VEX.0F 0xae. */
3346FNIEMOP_DEF(iemOp_VGrp15)
3347{
3348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3349 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3350 /* register, register */
3351 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
3352
3353 /* memory, register */
3354 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
3355 + pVCpu->iem.s.idxPrefix], bRm);
3356}
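/* Worked example (added commentary): the lookup above computes
   ModRM.reg * 4 + idxPrefix, so a memory form of VEX.0F 0xae /2 with no
   prefix yields index 2*4 + 0 = 8, i.e. iemOp_VGrp15_vldmxcsr, while /3
   selects iemOp_VGrp15_vstmxcsr; every register (11b) form is rejected
   via iemOp_InvalidWithRM before the table is consulted. */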
3357
3358
3359/* Opcode VEX.0F 0xaf - invalid. */
3360
3361/* Opcode VEX.0F 0xb0 - invalid. */
3362/* Opcode VEX.0F 0xb1 - invalid. */
3363/* Opcode VEX.0F 0xb2 - invalid. */
3365/* Opcode VEX.0F 0xb3 - invalid. */
3366/* Opcode VEX.0F 0xb4 - invalid. */
3367/* Opcode VEX.0F 0xb5 - invalid. */
3368/* Opcode VEX.0F 0xb6 - invalid. */
3369/* Opcode VEX.0F 0xb7 - invalid. */
3370/* Opcode VEX.0F 0xb8 - invalid. */
3371/* Opcode VEX.0F 0xb9 - invalid. */
3372/* Opcode VEX.0F 0xba - invalid. */
3373/* Opcode VEX.0F 0xbb - invalid. */
3374/* Opcode VEX.0F 0xbc - invalid. */
3375/* Opcode VEX.0F 0xbd - invalid. */
3376/* Opcode VEX.0F 0xbe - invalid. */
3377/* Opcode VEX.0F 0xbf - invalid. */
3378
3379/* Opcode VEX.0F 0xc0 - invalid. */
3380/* Opcode VEX.66.0F 0xc0 - invalid. */
3381/* Opcode VEX.F3.0F 0xc0 - invalid. */
3382/* Opcode VEX.F2.0F 0xc0 - invalid. */
3383
3384/* Opcode VEX.0F 0xc1 - invalid. */
3385/* Opcode VEX.66.0F 0xc1 - invalid. */
3386/* Opcode VEX.F3.0F 0xc1 - invalid. */
3387/* Opcode VEX.F2.0F 0xc1 - invalid. */
3388
3389/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
3390FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
3391/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
3392FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
3393/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
3394FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
3395/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
3396FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
3397
3398/* Opcode VEX.0F 0xc3 - invalid */
3399/* Opcode VEX.66.0F 0xc3 - invalid */
3400/* Opcode VEX.F3.0F 0xc3 - invalid */
3401/* Opcode VEX.F2.0F 0xc3 - invalid */
3402
3403/* Opcode VEX.0F 0xc4 - invalid */
3404/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
3405FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
3406/* Opcode VEX.F3.0F 0xc4 - invalid */
3407/* Opcode VEX.F2.0F 0xc4 - invalid */
3408
3409/* Opcode VEX.0F 0xc5 - invalid */
3410/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
3411FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
3412/* Opcode VEX.F3.0F 0xc5 - invalid */
3413/* Opcode VEX.F2.0F 0xc5 - invalid */
3414
3415/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
3416FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
3417/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
3418FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
3419/* Opcode VEX.F3.0F 0xc6 - invalid */
3420/* Opcode VEX.F2.0F 0xc6 - invalid */
3421
3422/* Opcode VEX.0F 0xc7 - invalid */
3423/* Opcode VEX.66.0F 0xc7 - invalid */
3424/* Opcode VEX.F3.0F 0xc7 - invalid */
3425/* Opcode VEX.F2.0F 0xc7 - invalid */
3426
3427/* Opcode VEX.0F 0xc8 - invalid */
3428/* Opcode VEX.0F 0xc9 - invalid */
3429/* Opcode VEX.0F 0xca - invalid */
3430/* Opcode VEX.0F 0xcb - invalid */
3431/* Opcode VEX.0F 0xcc - invalid */
3432/* Opcode VEX.0F 0xcd - invalid */
3433/* Opcode VEX.0F 0xce - invalid */
3434/* Opcode VEX.0F 0xcf - invalid */
3435
3436
3437/* Opcode VEX.0F 0xd0 - invalid */
3438/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
3439FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
3440/* Opcode VEX.F3.0F 0xd0 - invalid */
3441/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
3442FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
3443
3444/* Opcode VEX.0F 0xd1 - invalid */
3445/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
3446FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
3447/* Opcode VEX.F3.0F 0xd1 - invalid */
3448/* Opcode VEX.F2.0F 0xd1 - invalid */
3449
3450/* Opcode VEX.0F 0xd2 - invalid */
3451/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
3452FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
3453/* Opcode VEX.F3.0F 0xd2 - invalid */
3454/* Opcode VEX.F2.0F 0xd2 - invalid */
3455
3456/* Opcode VEX.0F 0xd3 - invalid */
3457/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
3458FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
3459/* Opcode VEX.F3.0F 0xd3 - invalid */
3460/* Opcode VEX.F2.0F 0xd3 - invalid */
3461
3462/* Opcode VEX.0F 0xd4 - invalid */
3463/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, W */
3464FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
3465/* Opcode VEX.F3.0F 0xd4 - invalid */
3466/* Opcode VEX.F2.0F 0xd4 - invalid */
3467
3468/* Opcode VEX.0F 0xd5 - invalid */
3469/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
3470FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
3471/* Opcode VEX.F3.0F 0xd5 - invalid */
3472/* Opcode VEX.F2.0F 0xd5 - invalid */
3473
3474/* Opcode VEX.0F 0xd6 - invalid */
3475
3476/**
3477 * @opcode 0xd6
3478 * @oppfx 0x66
3479 * @opcpuid avx
3480 * @opgroup og_avx_pcksclr_datamove
3481 * @opxcpttype none
3482 * @optest op1=-1 op2=2 -> op1=2
3483 * @optest op1=0 op2=-42 -> op1=-42
3484 */
3485FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
3486{
3487 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3489 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3490 {
3491 /*
3492 * Register, register.
3493 */
3494 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3495 IEM_MC_BEGIN(0, 0);
3496
3497 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3498 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3499
3500 IEM_MC_COPY_YREG_U64_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3501 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 else
3506 {
3507 /*
3508 * Memory, register.
3509 */
3510 IEM_MC_BEGIN(0, 2);
3511 IEM_MC_LOCAL(uint64_t, uSrc);
3512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3513
3514 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3515 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3516 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3517 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3518
3519 IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3520 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3521
3522 IEM_MC_ADVANCE_RIP();
3523 IEM_MC_END();
3524 }
3525 return VINF_SUCCESS;
3526}
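/* Commentary (added): the register form above uses IEM_MC_COPY_YREG_U64_ZX_VLMAX,
   i.e. the low 64 bits are copied and the destination register is zeroed up to
   VLMAX, whereas the memory form stores only the low 64 bits of the source
   register and leaves memory beyond the qword untouched. */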
3527
3528/* Opcode VEX.F3.0F 0xd6 - invalid */
3529/* Opcode VEX.F2.0F 0xd6 - invalid */
3530
3531
3532/* Opcode VEX.0F 0xd7 - invalid */
3533
3534/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
3535FNIEMOP_STUB(iemOp_vpmovmskb_Gd_Ux);
3536//FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
3537//{
3538// /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
3539// /** @todo testcase: Check that the instruction implicitly clears the high
3540// * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
3541// * and opcode modifications are made to work with the whole width (not
3542// * just 128). */
3543// IEMOP_MNEMONIC(vpmovmskb_Gd_Nq, "vpmovmskb Gd, Ux");
3544// /* Docs says register only. */
3545// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3546// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
3547// {
3548// IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
3549// IEM_MC_BEGIN(2, 0);
3550// IEM_MC_ARG(uint64_t *, pDst, 0);
3551// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3552// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3553// IEM_MC_PREPARE_SSE_USAGE();
3554// IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3555// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3556// IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
3557// IEM_MC_ADVANCE_RIP();
3558// IEM_MC_END();
3559// return VINF_SUCCESS;
3560// }
3561// return IEMOP_RAISE_INVALID_OPCODE();
3562//}
3563
3564/* Opcode VEX.F3.0F 0xd7 - invalid */
3565/* Opcode VEX.F2.0F 0xd7 - invalid */
3566
3567
3568/* Opcode VEX.0F 0xd8 - invalid */
3569/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
3570FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
3571/* Opcode VEX.F3.0F 0xd8 - invalid */
3572/* Opcode VEX.F2.0F 0xd8 - invalid */
3573
3574/* Opcode VEX.0F 0xd9 - invalid */
3575/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
3576FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
3577/* Opcode VEX.F3.0F 0xd9 - invalid */
3578/* Opcode VEX.F2.0F 0xd9 - invalid */
3579
3580/* Opcode VEX.0F 0xda - invalid */
3581/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
3582FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
3583/* Opcode VEX.F3.0F 0xda - invalid */
3584/* Opcode VEX.F2.0F 0xda - invalid */
3585
3586/* Opcode VEX.0F 0xdb - invalid */
3587
3588
3589/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
3590FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
3591{
3592 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3593 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3594 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3595}
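/* Commentary (assumption about the helper macro): IEM_SELECT_HOST_OR_FALLBACK
   is understood to pick the native implementation (g_iemAImpl_vpand) when the
   host advertises the fAvx2 feature and the portable C fallback
   (g_iemAImpl_vpand_fallback) otherwise; the vpandn, vpor and vpxor workers
   below follow the same pattern. */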
3596
3597
3598/* Opcode VEX.F3.0F 0xdb - invalid */
3599/* Opcode VEX.F2.0F 0xdb - invalid */
3600
3601/* Opcode VEX.0F 0xdc - invalid */
3602/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
3603FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
3604/* Opcode VEX.F3.0F 0xdc - invalid */
3605/* Opcode VEX.F2.0F 0xdc - invalid */
3606
3607/* Opcode VEX.0F 0xdd - invalid */
3608/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
3609FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
3610/* Opcode VEX.F3.0F 0xdd - invalid */
3611/* Opcode VEX.F2.0F 0xdd - invalid */
3612
3613/* Opcode VEX.0F 0xde - invalid */
3614/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, W */
3615FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
3616/* Opcode VEX.F3.0F 0xde - invalid */
3617/* Opcode VEX.F2.0F 0xde - invalid */
3618
3619/* Opcode VEX.0F 0xdf - invalid */
3620
3621
3622/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
3623FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
3624{
3625 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3626 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3627 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3628}
3629
3630
3631/* Opcode VEX.F3.0F 0xdf - invalid */
3632/* Opcode VEX.F2.0F 0xdf - invalid */
3633
3634/* Opcode VEX.0F 0xe0 - invalid */
3635/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
3636FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
3637/* Opcode VEX.F3.0F 0xe0 - invalid */
3638/* Opcode VEX.F2.0F 0xe0 - invalid */
3639
3640/* Opcode VEX.0F 0xe1 - invalid */
3641/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
3642FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
3643/* Opcode VEX.F3.0F 0xe1 - invalid */
3644/* Opcode VEX.F2.0F 0xe1 - invalid */
3645
3646/* Opcode VEX.0F 0xe2 - invalid */
3647/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
3648FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
3649/* Opcode VEX.F3.0F 0xe2 - invalid */
3650/* Opcode VEX.F2.0F 0xe2 - invalid */
3651
3652/* Opcode VEX.0F 0xe3 - invalid */
3653/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
3654FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
3655/* Opcode VEX.F3.0F 0xe3 - invalid */
3656/* Opcode VEX.F2.0F 0xe3 - invalid */
3657
3658/* Opcode VEX.0F 0xe4 - invalid */
3659/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, W */
3660FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
3661/* Opcode VEX.F3.0F 0xe4 - invalid */
3662/* Opcode VEX.F2.0F 0xe4 - invalid */
3663
3664/* Opcode VEX.0F 0xe5 - invalid */
3665/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
3666FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
3667/* Opcode VEX.F3.0F 0xe5 - invalid */
3668/* Opcode VEX.F2.0F 0xe5 - invalid */
3669
3670/* Opcode VEX.0F 0xe6 - invalid */
3671/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
3672FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
3673/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
3674FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
3675/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
3676FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
3677
3678
3679/* Opcode VEX.0F 0xe7 - invalid */
3680
3681/**
3682 * @opcode 0xe7
3683 * @opcodesub !11 mr/reg
3684 * @oppfx 0x66
3685 * @opcpuid avx
3686 * @opgroup og_avx_cachect
3687 * @opxcpttype 1
3688 * @optest op1=-1 op2=2 -> op1=2
3689 * @optest op1=0 op2=-42 -> op1=-42
3690 */
3691FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
3692{
3693 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
3694 Assert(pVCpu->iem.s.uVexLength <= 1);
3695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3696 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3697 {
3698 if (pVCpu->iem.s.uVexLength == 0)
3699 {
3700 /*
3701 * 128-bit: Memory, register.
3702 */
3703 IEM_MC_BEGIN(0, 2);
3704 IEM_MC_LOCAL(RTUINT128U, uSrc);
3705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3706
3707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3708 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3709 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3710 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3711
3712 IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3713 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3714
3715 IEM_MC_ADVANCE_RIP();
3716 IEM_MC_END();
3717 }
3718 else
3719 {
3720 /*
3721 * 256-bit: Memory, register.
3722 */
3723 IEM_MC_BEGIN(0, 2);
3724 IEM_MC_LOCAL(RTUINT256U, uSrc);
3725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3726
3727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3728 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3729 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3730 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3731
3732 IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3733 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3734
3735 IEM_MC_ADVANCE_RIP();
3736 IEM_MC_END();
3737 }
3738 return VINF_SUCCESS;
3739 }
3740 /**
3741 * @opdone
3742 * @opmnemonic udvex660fe7reg
3743 * @opcode 0xe7
3744 * @opcodesub 11 mr/reg
3745 * @oppfx 0x66
3746 * @opunused immediate
3747 * @opcpuid avx
3748 * @optest ->
3749 */
3750 return IEMOP_RAISE_INVALID_OPCODE();
3751}
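/* Commentary (added): both paths above go through the aligned store helpers
   (IEM_MC_STORE_MEM_U128_ALIGN_SSE / IEM_MC_STORE_MEM_U256_ALIGN_AVX), so a
   misaligned effective address is expected to fault instead of performing an
   unaligned non-temporal store, matching the 16/32-byte alignment requirement
   of VMOVNTDQ. */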
3752
3753/* Opcode VEX.F3.0F 0xe7 - invalid */
3754/* Opcode VEX.F2.0F 0xe7 - invalid */
3755
3756
3757/* Opcode VEX.0F 0xe8 - invalid */
3758/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
3759FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
3760/* Opcode VEX.F3.0F 0xe8 - invalid */
3761/* Opcode VEX.F2.0F 0xe8 - invalid */
3762
3763/* Opcode VEX.0F 0xe9 - invalid */
3764/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
3765FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
3766/* Opcode VEX.F3.0F 0xe9 - invalid */
3767/* Opcode VEX.F2.0F 0xe9 - invalid */
3768
3769/* Opcode VEX.0F 0xea - invalid */
3770/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
3771FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
3772/* Opcode VEX.F3.0F 0xea - invalid */
3773/* Opcode VEX.F2.0F 0xea - invalid */
3774
3775/* Opcode VEX.0F 0xeb - invalid */
3776
3777
3778/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
3779FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
3780{
3781 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3782 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3783 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3784}
3785
3786
3787
3788/* Opcode VEX.F3.0F 0xeb - invalid */
3789/* Opcode VEX.F2.0F 0xeb - invalid */
3790
3791/* Opcode VEX.0F 0xec - invalid */
3792/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
3793FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
3794/* Opcode VEX.F3.0F 0xec - invalid */
3795/* Opcode VEX.F2.0F 0xec - invalid */
3796
3797/* Opcode VEX.0F 0xed - invalid */
3798/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
3799FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
3800/* Opcode VEX.F3.0F 0xed - invalid */
3801/* Opcode VEX.F2.0F 0xed - invalid */
3802
3803/* Opcode VEX.0F 0xee - invalid */
3804/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, W */
3805FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
3806/* Opcode VEX.F3.0F 0xee - invalid */
3807/* Opcode VEX.F2.0F 0xee - invalid */
3808
3809
3810/* Opcode VEX.0F 0xef - invalid */
3811
3812/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
3813FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
3814{
3815 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3816 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
3817 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3818}
3819
3820/* Opcode VEX.F3.0F 0xef - invalid */
3821/* Opcode VEX.F2.0F 0xef - invalid */
3822
3823/* Opcode VEX.0F 0xf0 - invalid */
3824/* Opcode VEX.66.0F 0xf0 - invalid */
3825/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
3826FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
3827
3828/* Opcode VEX.0F 0xf1 - invalid */
3829/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
3830FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
3831/* Opcode VEX.F2.0F 0xf1 - invalid */
3832
3833/* Opcode VEX.0F 0xf2 - invalid */
3834/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
3835FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
3836/* Opcode VEX.F2.0F 0xf2 - invalid */
3837
3838/* Opcode VEX.0F 0xf3 - invalid */
3839/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
3840FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
3841/* Opcode VEX.F2.0F 0xf3 - invalid */
3842
3843/* Opcode VEX.0F 0xf4 - invalid */
3844/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
3845FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
3846/* Opcode VEX.F2.0F 0xf4 - invalid */
3847
3848/* Opcode VEX.0F 0xf5 - invalid */
3849/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
3850FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
3851/* Opcode VEX.F2.0F 0xf5 - invalid */
3852
3853/* Opcode VEX.0F 0xf6 - invalid */
3854/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
3855FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
3856/* Opcode VEX.F2.0F 0xf6 - invalid */
3857
3858/* Opcode VEX.0F 0xf7 - invalid */
3859/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
3860FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
3861/* Opcode VEX.F2.0F 0xf7 - invalid */
3862
3863/* Opcode VEX.0F 0xf8 - invalid */
3864/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, W */
3865FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
3866/* Opcode VEX.F2.0F 0xf8 - invalid */
3867
3868/* Opcode VEX.0F 0xf9 - invalid */
3869/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
3870FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
3871/* Opcode VEX.F2.0F 0xf9 - invalid */
3872
3873/* Opcode VEX.0F 0xfa - invalid */
3874/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
3875FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
3876/* Opcode VEX.F2.0F 0xfa - invalid */
3877
3878/* Opcode VEX.0F 0xfb - invalid */
3879/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, W */
3880FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
3881/* Opcode VEX.F2.0F 0xfb - invalid */
3882
3883/* Opcode VEX.0F 0xfc - invalid */
3884/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
3885FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
3886/* Opcode VEX.F2.0F 0xfc - invalid */
3887
3888/* Opcode VEX.0F 0xfd - invalid */
3889/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
3890FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
3891/* Opcode VEX.F2.0F 0xfd - invalid */
3892
3893/* Opcode VEX.0F 0xfe - invalid */
3894/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, W */
3895FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
3896/* Opcode VEX.F2.0F 0xfe - invalid */
3897
3898
3899/** Opcode **** 0x0f 0xff - UD0 */
3900FNIEMOP_DEF(iemOp_vud0)
3901{
3902 IEMOP_MNEMONIC(vud0, "vud0");
3903 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
3904 {
3905 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
3906#ifndef TST_IEM_CHECK_MC
3907 RTGCPTR GCPtrEff;
3908 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
3909 if (rcStrict != VINF_SUCCESS)
3910 return rcStrict;
3911#endif
3912 IEMOP_HLP_DONE_DECODING();
3913 }
3914 return IEMOP_RAISE_INVALID_OPCODE();
3915}
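/* Commentary (added): the vendor check above means that on Intel the ModR/M
   byte and any effective address are consumed before \#UD is raised, while on
   other vendors the opcode faults without decoding a ModR/M byte; this appears
   to mirror the UD0 handling in the legacy two-byte map. */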
3916
3917
3918
3919/**
3920 * VEX opcode map \#1.
3921 *
3922 * @sa g_apfnTwoByteMap
3923 */
3924IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
3925{
3926 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
3927 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
3928 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
3929 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
3930 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
3931 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
3932 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
3933 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
3934 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
3935 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
3936 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
3937 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
3938 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
3939 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
3940 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
3941 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
3942 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
3943
3944 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
3945 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
3946 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
3947 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3948 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3949 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3950 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
3951 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3952 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
3953 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
3954 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
3955 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
3956 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
3957 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
3958 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
3959 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
3960
3961 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
3962 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
3963 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
3964 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
3965 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
3966 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
3967 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
3968 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
3969 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3970 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3971 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
3972 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3973 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
3974 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
3975 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3976 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3977
3978 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
3979 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
3980 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
3981 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
3982 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
3983 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
3984 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
3985 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
3986 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3987 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3988 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3989 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3990 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3991 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3992 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3993 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
3994
3995 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
3996 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
3997 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
3998 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
3999 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
4000 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
4001 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
4002 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
4003 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
4004 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
4005 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
4006 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
4007 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
4008 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
4009 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
4010 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
4011
4012 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4013 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
4014 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4015 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
4016 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4017 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4018 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4019 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4020 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
4021 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
4022 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
4023 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
4024 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
4025 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
4026 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
4027 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
4028
4029 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4030 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4031 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4032 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4033 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4034 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4035 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4036 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4037 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4038 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4039 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4040 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4041 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4042 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4043 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4044 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
4045
4046 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
4047 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4048 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4049 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4050 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4051 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4052 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4053 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4054 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
4055 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
4056 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
4057 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
4058 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
4059 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
4060 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
4061 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
4062
4063 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
4064 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
4065 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
4066 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
4067 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
4068 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
4069 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
4070 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
4071 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
4072 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
4073 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
4074 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
4075 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
4076 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
4077 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
4078 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
4079
4080 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
4081 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
4082 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
4083 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
4084 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
4085 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
4086 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
4087 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
4088 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
4089 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
4090 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
4091 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
4092 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
4093 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
4094 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
4095 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
4096
4097 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4098 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4099 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4100 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4101 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4102 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4103 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4104 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4105 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4106 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4107 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
4108 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
4109 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
4110 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
4111 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
4112 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
4113
4114 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4115 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4116 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
4117 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4118 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
4119 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
4120 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
4121 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4122 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4123 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4124 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
4125 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
4126 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
4127 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
4128 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
4129 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
4130
4131 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
4132 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
4133 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
4134 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
4135 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4136 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
4137 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
4138 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
4139 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
4140 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
4141 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
4142 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
4143 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
4144 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
4145 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
4146 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
4147
4148 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
4149 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4150 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4151 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4152 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4153 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4154 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4155 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4156 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4157 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4158 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4159 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4160 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4161 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4162 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4163 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4164
4165 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4166 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4167 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4168 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4169 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4170 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4171 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
4172 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4173 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4174 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4175 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4176 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4177 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4178 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4179 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4180 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4181
4182 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
4183 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4184 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4185 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4186 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4187 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4188 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4189 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4190 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4191 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4192 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4193 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4194 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4195 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4196 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
4197 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
4198};
4199AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
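/* Commentary (added): 256 opcode rows times 4 prefix columns (none, 0x66,
   0xF3, 0xF2) account for the 1024 entries asserted above. */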
4200/** @} */
4201