source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@96947

Last change on this file since 96947 was 96947, checked in by vboxsync, 2 years ago

IEM: Added forgotten FETCH_EFLAGS in AVX variants of xCOMISx instructions.

1/* $Id: IEMAllInstructionsVexMap1.cpp.h 96947 2022-09-30 06:56:06Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
54 IEM_MC_BEGIN(4, 3);
55 IEM_MC_LOCAL(RTUINT256U, uDst);
56 IEM_MC_LOCAL(RTUINT256U, uSrc1);
57 IEM_MC_LOCAL(RTUINT256U, uSrc2);
58 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
59 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
60 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
61 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
62 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
63 IEM_MC_PREPARE_AVX_USAGE();
64 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
65 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
66 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
67 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
68 IEM_MC_ADVANCE_RIP();
69 IEM_MC_END();
70 }
71 else
72 {
73 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
74 IEM_MC_BEGIN(4, 0);
75 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
76 IEM_MC_ARG(PRTUINT128U, puDst, 1);
77 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
78 IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
79 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
80 IEM_MC_PREPARE_AVX_USAGE();
81 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
82 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
83 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
84 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
85 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
86 IEM_MC_ADVANCE_RIP();
87 IEM_MC_END();
88 }
89 }
90 else
91 {
92 /*
93 * Register, memory.
94 */
95 if (pVCpu->iem.s.uVexLength)
96 {
97 IEM_MC_BEGIN(4, 4);
98 IEM_MC_LOCAL(RTUINT256U, uDst);
99 IEM_MC_LOCAL(RTUINT256U, uSrc1);
100 IEM_MC_LOCAL(RTUINT256U, uSrc2);
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
102 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
103 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
104 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
105 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
106
107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
108 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
109 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
110 IEM_MC_PREPARE_AVX_USAGE();
111
112 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
113 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
114 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
115 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
116
117 IEM_MC_ADVANCE_RIP();
118 IEM_MC_END();
119 }
120 else
121 {
122 IEM_MC_BEGIN(4, 2);
123 IEM_MC_LOCAL(RTUINT128U, uSrc2);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
126 IEM_MC_ARG(PRTUINT128U, puDst, 1);
127 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
128 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);
129
130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
131 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
132 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
133 IEM_MC_PREPARE_AVX_USAGE();
134
135 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
136 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
137 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
138 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
139 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
140
141 IEM_MC_ADVANCE_RIP();
142 IEM_MC_END();
143 }
144 }
145 return VINF_SUCCESS;
146}
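/*
 * Illustrative sketch of a caller: a typical opcode handler funnels into the
 * worker above via a host/fallback function table, mirroring the
 * vunpcklps/vunpckhps handlers further down.  The handler name and the
 * IEMOPMEDIAF3_INIT_VARS macro below are assumptions for illustration only:
 *
 *     FNIEMOP_DEF(iemOp_vxorps_Vx_Hx_Wx)   // hypothetical handler
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
 *         IEMOPMEDIAF3_INIT_VARS(vxorps);  // assumed init macro for PCIEMOPMEDIAF3 tables
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */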
147
148
149/**
150 * Common worker for AVX2 instructions on the forms:
151 * - vpxxx xmm0, xmm1, xmm2/mem128
152 * - vpxxx ymm0, ymm1, ymm2/mem256
153 *
154 * Takes function table for function w/o implicit state parameter.
155 *
156 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
157 */
158FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
159{
160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
161 if (IEM_IS_MODRM_REG_MODE(bRm))
162 {
163 /*
164 * Register, register.
165 */
166 if (pVCpu->iem.s.uVexLength)
167 {
168 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
169 IEM_MC_BEGIN(3, 3);
170 IEM_MC_LOCAL(RTUINT256U, uDst);
171 IEM_MC_LOCAL(RTUINT256U, uSrc1);
172 IEM_MC_LOCAL(RTUINT256U, uSrc2);
173 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
174 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
175 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
176 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
177 IEM_MC_PREPARE_AVX_USAGE();
178 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
179 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
180 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
181 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
182 IEM_MC_ADVANCE_RIP();
183 IEM_MC_END();
184 }
185 else
186 {
187 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
188 IEM_MC_BEGIN(3, 0);
189 IEM_MC_ARG(PRTUINT128U, puDst, 0);
190 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
191 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
192 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
193 IEM_MC_PREPARE_AVX_USAGE();
194 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
195 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
196 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
197 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
198 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
199 IEM_MC_ADVANCE_RIP();
200 IEM_MC_END();
201 }
202 }
203 else
204 {
205 /*
206 * Register, memory.
207 */
208 if (pVCpu->iem.s.uVexLength)
209 {
210 IEM_MC_BEGIN(3, 4);
211 IEM_MC_LOCAL(RTUINT256U, uDst);
212 IEM_MC_LOCAL(RTUINT256U, uSrc1);
213 IEM_MC_LOCAL(RTUINT256U, uSrc2);
214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
215 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
216 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
217 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
218
219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
220 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
221 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
222 IEM_MC_PREPARE_AVX_USAGE();
223
224 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
225 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
226 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
227 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
228
229 IEM_MC_ADVANCE_RIP();
230 IEM_MC_END();
231 }
232 else
233 {
234 IEM_MC_BEGIN(3, 2);
235 IEM_MC_LOCAL(RTUINT128U, uSrc2);
236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
237 IEM_MC_ARG(PRTUINT128U, puDst, 0);
238 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
239 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
240
241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
242 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
243 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
244 IEM_MC_PREPARE_AVX_USAGE();
245
246 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
247 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
248 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
249 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
250 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
251
252 IEM_MC_ADVANCE_RIP();
253 IEM_MC_END();
254 }
255 }
256 return VINF_SUCCESS;
257}
258
259
260/**
261 * Common worker for AVX2 instructions on the forms:
262 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
263 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
264 *
265 * The 128-bit memory version of this instruction may elect to skip fetching the
266 * lower 64 bits of the operand. We, however, do not.
267 *
268 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
269 */
270FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
271{
272 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
273}
274
275
276/**
277 * Common worker for AVX2 instructions on the forms:
278 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
279 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
280 *
281 * The 128-bit memory version of this instruction may elect to skip fetching the
282 * higher 64 bits of the operand. We, however, do not.
283 *
284 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
285 */
286FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
287{
288 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
289}
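/*
 * The "skip" remark above works because an unpack-low only reads the low
 * halves of both sources.  For vpunpckldq xmm0, xmm1, xmm2/m128 the dword
 * interleave is:
 *
 *     dst[31:0]  = src1[31:0];     dst[63:32]  = src2[31:0];
 *     dst[95:64] = src1[63:32];    dst[127:96] = src2[63:32];
 *
 * so bits 127:64 of the memory operand never influence the result.
 */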
290
291
292/**
293 * Common worker for AVX2 instructions on the forms:
294 * - vpxxx xmm0, xmm1/mem128
295 * - vpxxx ymm0, ymm1/mem256
296 *
297 * Takes function table for function w/o implicit state parameter.
298 *
299 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
300 */
301FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
302{
303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
304 if (IEM_IS_MODRM_REG_MODE(bRm))
305 {
306 /*
307 * Register, register.
308 */
309 if (pVCpu->iem.s.uVexLength)
310 {
311 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
312 IEM_MC_BEGIN(2, 2);
313 IEM_MC_LOCAL(RTUINT256U, uDst);
314 IEM_MC_LOCAL(RTUINT256U, uSrc);
315 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
316 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
317 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
318 IEM_MC_PREPARE_AVX_USAGE();
319 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
320 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
321 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
322 IEM_MC_ADVANCE_RIP();
323 IEM_MC_END();
324 }
325 else
326 {
327 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
328 IEM_MC_BEGIN(2, 0);
329 IEM_MC_ARG(PRTUINT128U, puDst, 0);
330 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
331 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
332 IEM_MC_PREPARE_AVX_USAGE();
333 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
334 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
335 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
336 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
337 IEM_MC_ADVANCE_RIP();
338 IEM_MC_END();
339 }
340 }
341 else
342 {
343 /*
344 * Register, memory.
345 */
346 if (pVCpu->iem.s.uVexLength)
347 {
348 IEM_MC_BEGIN(2, 3);
349 IEM_MC_LOCAL(RTUINT256U, uDst);
350 IEM_MC_LOCAL(RTUINT256U, uSrc);
351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
352 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
353 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
354
355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
356 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
357 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
358 IEM_MC_PREPARE_AVX_USAGE();
359
360 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
361 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
362 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
363
364 IEM_MC_ADVANCE_RIP();
365 IEM_MC_END();
366 }
367 else
368 {
369 IEM_MC_BEGIN(2, 2);
370 IEM_MC_LOCAL(RTUINT128U, uSrc);
371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
372 IEM_MC_ARG(PRTUINT128U, puDst, 0);
373 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
374
375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
376 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
377 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
378 IEM_MC_PREPARE_AVX_USAGE();
379
380 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
381 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
382 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
383 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
384
385 IEM_MC_ADVANCE_RIP();
386 IEM_MC_END();
387 }
388 }
389 return VINF_SUCCESS;
390}
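/*
 * Illustrative sketch of a caller for the two-operand worker above; the
 * handler name and the IEMOPMEDIAOPTF2_INIT_VARS macro are assumptions for
 * illustration only:
 *
 *     FNIEMOP_DEF(iemOp_vpabsb_Vx_Wx)   // hypothetical handler
 *     {
 *         IEMOP_MNEMONIC2(VEX_RM, VPABSB, vpabsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
 *         IEMOPMEDIAOPTF2_INIT_VARS(vpabsb);  // assumed init macro for PCIEMOPMEDIAOPTF2 tables
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */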
391
392
393/* Opcode VEX.0F 0x00 - invalid */
394/* Opcode VEX.0F 0x01 - invalid */
395/* Opcode VEX.0F 0x02 - invalid */
396/* Opcode VEX.0F 0x03 - invalid */
397/* Opcode VEX.0F 0x04 - invalid */
398/* Opcode VEX.0F 0x05 - invalid */
399/* Opcode VEX.0F 0x06 - invalid */
400/* Opcode VEX.0F 0x07 - invalid */
401/* Opcode VEX.0F 0x08 - invalid */
402/* Opcode VEX.0F 0x09 - invalid */
403/* Opcode VEX.0F 0x0a - invalid */
404
405/** Opcode VEX.0F 0x0b. */
406FNIEMOP_DEF(iemOp_vud2)
407{
408 IEMOP_MNEMONIC(vud2, "vud2");
409 return IEMOP_RAISE_INVALID_OPCODE();
410}
411
412/* Opcode VEX.0F 0x0c - invalid */
413/* Opcode VEX.0F 0x0d - invalid */
414/* Opcode VEX.0F 0x0e - invalid */
415/* Opcode VEX.0F 0x0f - invalid */
416
417
418/**
419 * @opcode 0x10
420 * @oppfx none
421 * @opcpuid avx
422 * @opgroup og_avx_simdfp_datamove
423 * @opxcpttype 4UA
424 * @optest op1=1 op2=2 -> op1=2
425 * @optest op1=0 op2=-22 -> op1=-22
426 */
427FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
428{
429 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
430 Assert(pVCpu->iem.s.uVexLength <= 1);
431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
432 if (IEM_IS_MODRM_REG_MODE(bRm))
433 {
434 /*
435 * Register, register.
436 */
437 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
438 IEM_MC_BEGIN(0, 0);
439 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
440 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
441 if (pVCpu->iem.s.uVexLength == 0)
442 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
443 IEM_GET_MODRM_RM(pVCpu, bRm));
444 else
445 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
446 IEM_GET_MODRM_RM(pVCpu, bRm));
447 IEM_MC_ADVANCE_RIP();
448 IEM_MC_END();
449 }
450 else if (pVCpu->iem.s.uVexLength == 0)
451 {
452 /*
453 * 128-bit: Register, Memory
454 */
455 IEM_MC_BEGIN(0, 2);
456 IEM_MC_LOCAL(RTUINT128U, uSrc);
457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
458
459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
460 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
461 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
462 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
463
464 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
465 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
466
467 IEM_MC_ADVANCE_RIP();
468 IEM_MC_END();
469 }
470 else
471 {
472 /*
473 * 256-bit: Register, Memory
474 */
475 IEM_MC_BEGIN(0, 2);
476 IEM_MC_LOCAL(RTUINT256U, uSrc);
477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
478
479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
480 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
481 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
482 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
483
484 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
485 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
486
487 IEM_MC_ADVANCE_RIP();
488 IEM_MC_END();
489 }
490 return VINF_SUCCESS;
491}
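/*
 * The _ZX_VLMAX stores/copies above implement the VEX rule that a VEX.128
 * write zeroes the destination bits above 127:
 *
 *     vmovups ymm1, [mem]   ; VEX.256: loads all 256 bits
 *     vmovups xmm1, [mem]   ; VEX.128: loads 128 bits and zeroes ymm1[255:128]
 */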
492
493
494/**
495 * @opcode 0x10
496 * @oppfx 0x66
497 * @opcpuid avx
498 * @opgroup og_avx_simdfp_datamove
499 * @opxcpttype 4UA
500 * @optest op1=1 op2=2 -> op1=2
501 * @optest op1=0 op2=-22 -> op1=-22
502 */
503FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
504{
505 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
506 Assert(pVCpu->iem.s.uVexLength <= 1);
507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
508 if (IEM_IS_MODRM_REG_MODE(bRm))
509 {
510 /*
511 * Register, register.
512 */
513 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
514 IEM_MC_BEGIN(0, 0);
515 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
516 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
517 if (pVCpu->iem.s.uVexLength == 0)
518 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
519 IEM_GET_MODRM_RM(pVCpu, bRm));
520 else
521 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
522 IEM_GET_MODRM_RM(pVCpu, bRm));
523 IEM_MC_ADVANCE_RIP();
524 IEM_MC_END();
525 }
526 else if (pVCpu->iem.s.uVexLength == 0)
527 {
528 /*
529 * 128-bit: Register, memory.
530 */
531 IEM_MC_BEGIN(0, 2);
532 IEM_MC_LOCAL(RTUINT128U, uSrc);
533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
534
535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
536 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
537 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
538 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
539
540 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
541 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
542
543 IEM_MC_ADVANCE_RIP();
544 IEM_MC_END();
545 }
546 else
547 {
548 /*
549 * 256-bit: Register, memory.
550 */
551 IEM_MC_BEGIN(0, 2);
552 IEM_MC_LOCAL(RTUINT256U, uSrc);
553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
554
555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
556 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
558 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
559
560 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
561 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
562
563 IEM_MC_ADVANCE_RIP();
564 IEM_MC_END();
565 }
566 return VINF_SUCCESS;
567}
568
569
570FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
571{
572 Assert(pVCpu->iem.s.uVexLength <= 1);
573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
574 if (IEM_IS_MODRM_REG_MODE(bRm))
575 {
576 /**
577 * @opcode 0x10
578 * @oppfx 0xf3
579 * @opcodesub 11 mr/reg
580 * @opcpuid avx
581 * @opgroup og_avx_simdfp_datamerge
582 * @opxcpttype 5
583 * @optest op1=1 op2=0 op3=2 -> op1=2
584 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
585 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
586 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
587 * @note HssHi refers to bits 127:32.
588 */
589 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
590 IEMOP_HLP_DONE_VEX_DECODING();
591 IEM_MC_BEGIN(0, 0);
592
593 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
594 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
595 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
596 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
597 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
598 IEM_MC_ADVANCE_RIP();
599 IEM_MC_END();
600 }
601 else
602 {
603 /**
604 * @opdone
605 * @opcode 0x10
606 * @oppfx 0xf3
607 * @opcodesub !11 mr/reg
608 * @opcpuid avx
609 * @opgroup og_avx_simdfp_datamove
610 * @opxcpttype 5
611 * @opfunction iemOp_vmovss_Vss_Hss_Wss
612 * @optest op1=1 op2=2 -> op1=2
613 * @optest op1=0 op2=-22 -> op1=-22
614 */
615 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
616 IEM_MC_BEGIN(0, 2);
617 IEM_MC_LOCAL(uint32_t, uSrc);
618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
619
620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
621 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
624
625 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
626 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
627
628 IEM_MC_ADVANCE_RIP();
629 IEM_MC_END();
630 }
631
632 return VINF_SUCCESS;
633}
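/*
 * The two encodings above differ in how bits 127:32 of the destination are
 * produced - the register form merges, the memory form zero-extends:
 *
 *     vmovss xmm1, xmm2, xmm3   ; xmm1[31:0]=xmm3[31:0], xmm1[127:32]=xmm2[127:32]
 *     vmovss xmm1, [mem32]      ; xmm1[31:0]=[mem32],    xmm1[127:32]=0
 *
 * Both forms zero ymm1[255:128] (VLMAX zero-extension).
 */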
634
635
636FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
637{
638 Assert(pVCpu->iem.s.uVexLength <= 1);
639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
640 if (IEM_IS_MODRM_REG_MODE(bRm))
641 {
642 /**
643 * @opcode 0x10
644 * @oppfx 0xf2
645 * @opcodesub 11 mr/reg
646 * @opcpuid avx
647 * @opgroup og_avx_simdfp_datamerge
648 * @opxcpttype 5
649 * @optest op1=1 op2=0 op3=2 -> op1=2
650 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
651 * @optest op1=3 op2=-1 op3=0x77 ->
652 * op1=0xffffffffffffffff0000000000000077
653 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
654 */
655 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
656 IEMOP_HLP_DONE_VEX_DECODING();
657 IEM_MC_BEGIN(0, 0);
658
659 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
660 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
661 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
662 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
663 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
664 IEM_MC_ADVANCE_RIP();
665 IEM_MC_END();
666 }
667 else
668 {
669 /**
670 * @opdone
671 * @opcode 0x10
672 * @oppfx 0xf2
673 * @opcodesub !11 mr/reg
674 * @opcpuid avx
675 * @opgroup og_avx_simdfp_datamove
676 * @opxcpttype 5
677 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
678 * @optest op1=1 op2=2 -> op1=2
679 * @optest op1=0 op2=-22 -> op1=-22
680 */
681 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
682 IEM_MC_BEGIN(0, 2);
683 IEM_MC_LOCAL(uint64_t, uSrc);
684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
685
686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
687 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
688 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
689 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
690
691 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
692 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
693
694 IEM_MC_ADVANCE_RIP();
695 IEM_MC_END();
696 }
697
698 return VINF_SUCCESS;
699}
700
701
702/**
703 * @opcode 0x11
704 * @oppfx none
705 * @opcpuid avx
706 * @opgroup og_avx_simdfp_datamove
707 * @opxcpttype 4UA
708 * @optest op1=1 op2=2 -> op1=2
709 * @optest op1=0 op2=-22 -> op1=-22
710 */
711FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
712{
713 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
714 Assert(pVCpu->iem.s.uVexLength <= 1);
715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
716 if (IEM_IS_MODRM_REG_MODE(bRm))
717 {
718 /*
719 * Register, register.
720 */
721 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
722 IEM_MC_BEGIN(0, 0);
723 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
724 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
725 if (pVCpu->iem.s.uVexLength == 0)
726 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
727 IEM_GET_MODRM_REG(pVCpu, bRm));
728 else
729 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
730 IEM_GET_MODRM_REG(pVCpu, bRm));
731 IEM_MC_ADVANCE_RIP();
732 IEM_MC_END();
733 }
734 else if (pVCpu->iem.s.uVexLength == 0)
735 {
736 /*
737 * 128-bit: Memory, register.
738 */
739 IEM_MC_BEGIN(0, 2);
740 IEM_MC_LOCAL(RTUINT128U, uSrc);
741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
742
743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
744 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
745 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
746 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
747
748 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
749 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
750
751 IEM_MC_ADVANCE_RIP();
752 IEM_MC_END();
753 }
754 else
755 {
756 /*
757 * 256-bit: Memory, register.
758 */
759 IEM_MC_BEGIN(0, 2);
760 IEM_MC_LOCAL(RTUINT256U, uSrc);
761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
762
763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
764 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
765 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
766 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
767
768 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
769 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
770
771 IEM_MC_ADVANCE_RIP();
772 IEM_MC_END();
773 }
774 return VINF_SUCCESS;
775}
776
777
778/**
779 * @opcode 0x11
780 * @oppfx 0x66
781 * @opcpuid avx
782 * @opgroup og_avx_simdfp_datamove
783 * @opxcpttype 4UA
784 * @optest op1=1 op2=2 -> op1=2
785 * @optest op1=0 op2=-22 -> op1=-22
786 */
787FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
788{
789 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
790 Assert(pVCpu->iem.s.uVexLength <= 1);
791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
792 if (IEM_IS_MODRM_REG_MODE(bRm))
793 {
794 /*
795 * Register, register.
796 */
797 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
798 IEM_MC_BEGIN(0, 0);
799 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
800 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
801 if (pVCpu->iem.s.uVexLength == 0)
802 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
803 IEM_GET_MODRM_REG(pVCpu, bRm));
804 else
805 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
806 IEM_GET_MODRM_REG(pVCpu, bRm));
807 IEM_MC_ADVANCE_RIP();
808 IEM_MC_END();
809 }
810 else if (pVCpu->iem.s.uVexLength == 0)
811 {
812 /*
813 * 128-bit: Memory, register.
814 */
815 IEM_MC_BEGIN(0, 2);
816 IEM_MC_LOCAL(RTUINT128U, uSrc);
817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
818
819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
820 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
822 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
823
824 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
825 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
826
827 IEM_MC_ADVANCE_RIP();
828 IEM_MC_END();
829 }
830 else
831 {
832 /*
833 * 256-bit: Memory, register.
834 */
835 IEM_MC_BEGIN(0, 2);
836 IEM_MC_LOCAL(RTUINT256U, uSrc);
837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
838
839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
840 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
841 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
842 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
843
844 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
845 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
846
847 IEM_MC_ADVANCE_RIP();
848 IEM_MC_END();
849 }
850 return VINF_SUCCESS;
851}
852
853
854FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
855{
856 Assert(pVCpu->iem.s.uVexLength <= 1);
857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
858 if (IEM_IS_MODRM_REG_MODE(bRm))
859 {
860 /**
861 * @opcode 0x11
862 * @oppfx 0xf3
863 * @opcodesub 11 mr/reg
864 * @opcpuid avx
865 * @opgroup og_avx_simdfp_datamerge
866 * @opxcpttype 5
867 * @optest op1=1 op2=0 op3=2 -> op1=2
868 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
869 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
870 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
871 */
872 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
873 IEMOP_HLP_DONE_VEX_DECODING();
874 IEM_MC_BEGIN(0, 0);
875
876 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
877 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
878 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
879 IEM_GET_MODRM_REG(pVCpu, bRm),
880 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
881 IEM_MC_ADVANCE_RIP();
882 IEM_MC_END();
883 }
884 else
885 {
886 /**
887 * @opdone
888 * @opcode 0x11
889 * @oppfx 0xf3
890 * @opcodesub !11 mr/reg
891 * @opcpuid avx
892 * @opgroup og_avx_simdfp_datamove
893 * @opxcpttype 5
894 * @opfunction iemOp_vmovss_Wss_Hss_Vss
895 * @optest op1=1 op2=2 -> op1=2
896 * @optest op1=0 op2=-22 -> op1=-22
897 */
898 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
899 IEM_MC_BEGIN(0, 2);
900 IEM_MC_LOCAL(uint32_t, uSrc);
901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
902
903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
904 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
905 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
906 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
907
908 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
909 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
910
911 IEM_MC_ADVANCE_RIP();
912 IEM_MC_END();
913 }
914
915 return VINF_SUCCESS;
916}
917
918
919FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
920{
921 Assert(pVCpu->iem.s.uVexLength <= 1);
922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
923 if (IEM_IS_MODRM_REG_MODE(bRm))
924 {
925 /**
926 * @opcode 0x11
927 * @oppfx 0xf2
928 * @opcodesub 11 mr/reg
929 * @opcpuid avx
930 * @opgroup og_avx_simdfp_datamerge
931 * @opxcpttype 5
932 * @optest op1=1 op2=0 op3=2 -> op1=2
933 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
934 * @optest op1=3 op2=-1 op3=0x77 ->
935 * op1=0xffffffffffffffff0000000000000077
936 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
937 */
938 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
939 IEMOP_HLP_DONE_VEX_DECODING();
940 IEM_MC_BEGIN(0, 0);
941
942 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
943 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
944 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
945 IEM_GET_MODRM_REG(pVCpu, bRm),
946 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
947 IEM_MC_ADVANCE_RIP();
948 IEM_MC_END();
949 }
950 else
951 {
952 /**
953 * @opdone
954 * @opcode 0x11
955 * @oppfx 0xf2
956 * @opcodesub !11 mr/reg
957 * @opcpuid avx
958 * @opgroup og_avx_simdfp_datamove
959 * @opxcpttype 5
960 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
961 * @optest op1=1 op2=2 -> op1=2
962 * @optest op1=0 op2=-22 -> op1=-22
963 */
964 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
965 IEM_MC_BEGIN(0, 2);
966 IEM_MC_LOCAL(uint64_t, uSrc);
967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
968
969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
970 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
971 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
972 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
973
974 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
975 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
976
977 IEM_MC_ADVANCE_RIP();
978 IEM_MC_END();
979 }
980
981 return VINF_SUCCESS;
982}
983
984
985FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
986{
987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
988 if (IEM_IS_MODRM_REG_MODE(bRm))
989 {
990 /**
991 * @opcode 0x12
992 * @opcodesub 11 mr/reg
993 * @oppfx none
994 * @opcpuid avx
995 * @opgroup og_avx_simdfp_datamerge
996 * @opxcpttype 7LZ
997 * @optest op2=0x2200220122022203
998 * op3=0x3304330533063307
999 * -> op1=0x22002201220222033304330533063307
1000 * @optest op2=-1 op3=-42 -> op1=-42
1001 * @note op3 and op2 are only the 8-byte high XMM register halves.
1002 */
1003 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1004
1005 IEMOP_HLP_DONE_VEX_DECODING_L0();
1006 IEM_MC_BEGIN(0, 0);
1007
1008 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1009 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1010 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1011 IEM_GET_MODRM_RM(pVCpu, bRm),
1012 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1013
1014 IEM_MC_ADVANCE_RIP();
1015 IEM_MC_END();
1016 }
1017 else
1018 {
1019 /**
1020 * @opdone
1021 * @opcode 0x12
1022 * @opcodesub !11 mr/reg
1023 * @oppfx none
1024 * @opcpuid avx
1025 * @opgroup og_avx_simdfp_datamove
1026 * @opxcpttype 5LZ
1027 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1028 * @optest op1=1 op2=0 op3=0 -> op1=0
1029 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1030 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1031 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1032 */
1033 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1034
1035 IEM_MC_BEGIN(0, 2);
1036 IEM_MC_LOCAL(uint64_t, uSrc);
1037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1038
1039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1040 IEMOP_HLP_DONE_VEX_DECODING_L0();
1041 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1042 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1043
1044 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1045 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1046 uSrc,
1047 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1048
1049 IEM_MC_ADVANCE_RIP();
1050 IEM_MC_END();
1051 }
1052 return VINF_SUCCESS;
1053}
1054
1055
1056/**
1057 * @opcode 0x12
1058 * @opcodesub !11 mr/reg
1059 * @oppfx 0x66
1060 * @opcpuid avx
1061 * @opgroup og_avx_pcksclr_datamerge
1062 * @opxcpttype 5LZ
1063 * @optest op2=0 op3=2 -> op1=2
1064 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1065 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1066 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1067 */
1068FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1069{
1070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1071 if (IEM_IS_MODRM_MEM_MODE(bRm))
1072 {
1073 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1074
1075 IEM_MC_BEGIN(0, 2);
1076 IEM_MC_LOCAL(uint64_t, uSrc);
1077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1078
1079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1080 IEMOP_HLP_DONE_VEX_DECODING_L0();
1081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1083
1084 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1085 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1086 uSrc,
1087 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1088
1089 IEM_MC_ADVANCE_RIP();
1090 IEM_MC_END();
1091 return VINF_SUCCESS;
1092 }
1093
1094 /**
1095 * @opdone
1096 * @opmnemonic udvex660f12m3
1097 * @opcode 0x12
1098 * @opcodesub 11 mr/reg
1099 * @oppfx 0x66
1100 * @opunused immediate
1101 * @opcpuid avx
1102 * @optest ->
1103 */
1104 return IEMOP_RAISE_INVALID_OPCODE();
1105}
1106
1107
1108/**
1109 * @opcode 0x12
1110 * @oppfx 0xf3
1111 * @opcpuid avx
1112 * @opgroup og_avx_pcksclr_datamove
1113 * @opxcpttype 4
1114 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1115 * -> op1=0x00000002000000020000000100000001
1116 * @optest vex.l==1 /
1117 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1118 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1119 */
1120FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1121{
1122 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1123 Assert(pVCpu->iem.s.uVexLength <= 1);
1124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1125 if (IEM_IS_MODRM_REG_MODE(bRm))
1126 {
1127 /*
1128 * Register, register.
1129 */
1130 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1131 if (pVCpu->iem.s.uVexLength == 0)
1132 {
1133 IEM_MC_BEGIN(2, 0);
1134 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1135 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1136
1137 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1138 IEM_MC_PREPARE_AVX_USAGE();
1139
1140 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1141 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1142 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1143 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1144
1145 IEM_MC_ADVANCE_RIP();
1146 IEM_MC_END();
1147 }
1148 else
1149 {
1150 IEM_MC_BEGIN(3, 0);
1151 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1152 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1153 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1154
1155 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1156 IEM_MC_PREPARE_AVX_USAGE();
1157 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);
1158
1159 IEM_MC_ADVANCE_RIP();
1160 IEM_MC_END();
1161 }
1162 }
1163 else
1164 {
1165 /*
1166 * Register, memory.
1167 */
1168 if (pVCpu->iem.s.uVexLength == 0)
1169 {
1170 IEM_MC_BEGIN(2, 2);
1171 IEM_MC_LOCAL(RTUINT128U, uSrc);
1172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1173 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1174 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1175
1176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1177 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1178 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1179 IEM_MC_PREPARE_AVX_USAGE();
1180
1181 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1182 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1183 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1184 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1185
1186 IEM_MC_ADVANCE_RIP();
1187 IEM_MC_END();
1188 }
1189 else
1190 {
1191 IEM_MC_BEGIN(3, 2);
1192 IEM_MC_LOCAL(RTUINT256U, uSrc);
1193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1194 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1195 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1196 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1197
1198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1199 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1200 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1201 IEM_MC_PREPARE_AVX_USAGE();
1202
1203 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1204 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);
1205
1206 IEM_MC_ADVANCE_RIP();
1207 IEM_MC_END();
1208 }
1209 }
1210 return VINF_SUCCESS;
1211}
1212
1213
1214/**
1215 * @opcode 0x12
1216 * @oppfx 0xf2
1217 * @opcpuid avx
1218 * @opgroup og_avx_pcksclr_datamove
1219 * @opxcpttype 5
1220 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1221 * -> op1=0x22222222111111112222222211111111
1222 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1223 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1224 */
1225FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1226{
1227 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1229 if (IEM_IS_MODRM_REG_MODE(bRm))
1230 {
1231 /*
1232 * Register, register.
1233 */
1234 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1235 if (pVCpu->iem.s.uVexLength == 0)
1236 {
1237 IEM_MC_BEGIN(2, 0);
1238 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1239 IEM_MC_ARG(uint64_t, uSrc, 1);
1240
1241 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1242 IEM_MC_PREPARE_AVX_USAGE();
1243
1244 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1245 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1246 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1247 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1248
1249 IEM_MC_ADVANCE_RIP();
1250 IEM_MC_END();
1251 }
1252 else
1253 {
1254 IEM_MC_BEGIN(3, 0);
1255 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1256 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1257 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1258
1259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1260 IEM_MC_PREPARE_AVX_USAGE();
1261 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);
1262
1263 IEM_MC_ADVANCE_RIP();
1264 IEM_MC_END();
1265 }
1266 }
1267 else
1268 {
1269 /*
1270 * Register, memory.
1271 */
1272 if (pVCpu->iem.s.uVexLength == 0)
1273 {
1274 IEM_MC_BEGIN(2, 2);
1275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1276 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1277 IEM_MC_ARG(uint64_t, uSrc, 1);
1278
1279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1280 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1281 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1282 IEM_MC_PREPARE_AVX_USAGE();
1283
1284 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1285 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1286 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1287 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1288
1289 IEM_MC_ADVANCE_RIP();
1290 IEM_MC_END();
1291 }
1292 else
1293 {
1294 IEM_MC_BEGIN(3, 2);
1295 IEM_MC_LOCAL(RTUINT256U, uSrc);
1296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1297 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1298 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1299 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1300
1301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1302 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1303 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1304 IEM_MC_PREPARE_AVX_USAGE();
1305
1306 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1307 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);
1308
1309 IEM_MC_ADVANCE_RIP();
1310 IEM_MC_END();
1311 }
1312 }
1313 return VINF_SUCCESS;
1314}
1315
1316
1317/**
1318 * @opcode 0x13
1319 * @opcodesub !11 mr/reg
1320 * @oppfx none
1321 * @opcpuid avx
1322 * @opgroup og_avx_simdfp_datamove
1323 * @opxcpttype 5
1324 * @optest op1=1 op2=2 -> op1=2
1325 * @optest op1=0 op2=-42 -> op1=-42
1326 */
1327FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1328{
1329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1330 if (IEM_IS_MODRM_MEM_MODE(bRm))
1331 {
1332 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1333
1334 IEM_MC_BEGIN(0, 2);
1335 IEM_MC_LOCAL(uint64_t, uSrc);
1336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1337
1338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1339 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1340 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1341 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1342
1343 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1344 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1345
1346 IEM_MC_ADVANCE_RIP();
1347 IEM_MC_END();
1348 return VINF_SUCCESS;
1349 }
1350
1351 /**
1352 * @opdone
1353 * @opmnemonic udvex0f13m3
1354 * @opcode 0x13
1355 * @opcodesub 11 mr/reg
1356 * @oppfx none
1357 * @opunused immediate
1358 * @opcpuid avx
1359 * @optest ->
1360 */
1361 return IEMOP_RAISE_INVALID_OPCODE();
1362}
1363
1364
1365/**
1366 * @opcode 0x13
1367 * @opcodesub !11 mr/reg
1368 * @oppfx 0x66
1369 * @opcpuid avx
1370 * @opgroup og_avx_pcksclr_datamove
1371 * @opxcpttype 5
1372 * @optest op1=1 op2=2 -> op1=2
1373 * @optest op1=0 op2=-42 -> op1=-42
1374 */
1375FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1376{
1377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1378 if (IEM_IS_MODRM_MEM_MODE(bRm))
1379 {
1380 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1381 IEM_MC_BEGIN(0, 2);
1382 IEM_MC_LOCAL(uint64_t, uSrc);
1383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1384
1385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1386 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1387 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1388 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1389
1390 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1391 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1392
1393 IEM_MC_ADVANCE_RIP();
1394 IEM_MC_END();
1395 return VINF_SUCCESS;
1396 }
1397
1398 /**
1399 * @opdone
1400 * @opmnemonic udvex660f13m3
1401 * @opcode 0x13
1402 * @opcodesub 11 mr/reg
1403 * @oppfx 0x66
1404 * @opunused immediate
1405 * @opcpuid avx
1406 * @optest ->
1407 */
1408 return IEMOP_RAISE_INVALID_OPCODE();
1409}
1410
1411/* Opcode VEX.F3.0F 0x13 - invalid */
1412/* Opcode VEX.F2.0F 0x13 - invalid */
1413
1414/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
1415FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1416{
1417 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1418 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1419 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1420}
1421
1422
1423/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1424FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1425{
1426 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1427 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1428 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1429}
1430
1431
1432/* Opcode VEX.F3.0F 0x14 - invalid */
1433/* Opcode VEX.F2.0F 0x14 - invalid */
1434
1435
1436/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1437FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1438{
1439 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1440 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1441 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1442}
1443
1444
1445/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1446FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1447{
1448 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1449 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1450 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1451}
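/*
 * Worked 128-bit lane example for the four unpack handlers above, with
 * src1 = {a3,a2,a1,a0} and src2 = {b3,b2,b1,b0} as packed singles:
 *
 *     vunpcklps -> {b1,a1,b0,a0}      vunpckhps -> {b3,a3,b2,a2}
 *
 * and with src1 = {a1,a0}, src2 = {b1,b0} as packed doubles:
 *
 *     vunpcklpd -> {b0,a0}            vunpckhpd -> {b1,a1}
 */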
1452
1453
1454/* Opcode VEX.F3.0F 0x15 - invalid */
1455/* Opcode VEX.F2.0F 0x15 - invalid */
1456
1457
1458FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1459{
1460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1461 if (IEM_IS_MODRM_REG_MODE(bRm))
1462 {
1463 /**
1464 * @opcode 0x16
1465 * @opcodesub 11 mr/reg
1466 * @oppfx none
1467 * @opcpuid avx
1468 * @opgroup og_avx_simdfp_datamerge
1469 * @opxcpttype 7LZ
1470 */
1471 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1472
1473 IEMOP_HLP_DONE_VEX_DECODING_L0();
1474 IEM_MC_BEGIN(0, 0);
1475
1476 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1477 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1478 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1479 IEM_GET_MODRM_RM(pVCpu, bRm),
1480 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1481
1482 IEM_MC_ADVANCE_RIP();
1483 IEM_MC_END();
1484 }
1485 else
1486 {
1487 /**
1488 * @opdone
1489 * @opcode 0x16
1490 * @opcodesub !11 mr/reg
1491 * @oppfx none
1492 * @opcpuid avx
1493 * @opgroup og_avx_simdfp_datamove
1494 * @opxcpttype 5LZ
1495 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1496 */
1497 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1498
1499 IEM_MC_BEGIN(0, 2);
1500 IEM_MC_LOCAL(uint64_t, uSrc);
1501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1502
1503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1504 IEMOP_HLP_DONE_VEX_DECODING_L0();
1505 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1506 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1507
1508 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1509 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1510 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1511 uSrc);
1512
1513 IEM_MC_ADVANCE_RIP();
1514 IEM_MC_END();
1515 }
1516 return VINF_SUCCESS;
1517}
1518
1519
1520/**
1521 * @opcode 0x16
1522 * @opcodesub !11 mr/reg
1523 * @oppfx 0x66
1524 * @opcpuid avx
1525 * @opgroup og_avx_pcksclr_datamerge
1526 * @opxcpttype 5LZ
1527 */
1528FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1529{
1530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1531 if (IEM_IS_MODRM_MEM_MODE(bRm))
1532 {
1533 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1534
1535 IEM_MC_BEGIN(0, 2);
1536 IEM_MC_LOCAL(uint64_t, uSrc);
1537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1538
1539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1540 IEMOP_HLP_DONE_VEX_DECODING_L0();
1541 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1542 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1543
1544 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1545 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1546 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1547 uSrc);
1548
1549 IEM_MC_ADVANCE_RIP();
1550 IEM_MC_END();
1551 return VINF_SUCCESS;
1552 }
1553
1554 /**
1555 * @opdone
1556 * @opmnemonic udvex660f16m3
1557 * @opcode 0x16
1558 * @opcodesub 11 mr/reg
1559 * @oppfx 0x66
1560 * @opunused immediate
1561 * @opcpuid avx
1562 * @optest ->
1563 */
1564 return IEMOP_RAISE_INVALID_OPCODE();
1565}
1566
1567
1568/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1569/**
1570 * @opcode 0x16
1571 * @oppfx 0xf3
1572 * @opcpuid avx
1573 * @opgroup og_avx_pcksclr_datamove
1574 * @opxcpttype 4
1575 */
1576FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1577{
1578 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1579 Assert(pVCpu->iem.s.uVexLength <= 1);
1580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1581 if (IEM_IS_MODRM_REG_MODE(bRm))
1582 {
1583 /*
1584 * Register, register.
1585 */
1586 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1587 if (pVCpu->iem.s.uVexLength == 0)
1588 {
1589 IEM_MC_BEGIN(2, 0);
1590 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1591 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1592
1593 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1594 IEM_MC_PREPARE_AVX_USAGE();
1595
1596 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1597 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1598 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1599 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1600
1601 IEM_MC_ADVANCE_RIP();
1602 IEM_MC_END();
1603 }
1604 else
1605 {
1606 IEM_MC_BEGIN(3, 0);
1607 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1608 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1609 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1610
1611 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1612 IEM_MC_PREPARE_AVX_USAGE();
1613 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);
1614
1615 IEM_MC_ADVANCE_RIP();
1616 IEM_MC_END();
1617 }
1618 }
1619 else
1620 {
1621 /*
1622 * Register, memory.
1623 */
1624 if (pVCpu->iem.s.uVexLength == 0)
1625 {
1626 IEM_MC_BEGIN(2, 2);
1627 IEM_MC_LOCAL(RTUINT128U, uSrc);
1628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1629 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1630 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1631
1632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1633 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1634 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1635 IEM_MC_PREPARE_AVX_USAGE();
1636
1637 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1638 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1639 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1640 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1641
1642 IEM_MC_ADVANCE_RIP();
1643 IEM_MC_END();
1644 }
1645 else
1646 {
1647 IEM_MC_BEGIN(3, 2);
1648 IEM_MC_LOCAL(RTUINT256U, uSrc);
1649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1650 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1651 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1652 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1653
1654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1655 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1656 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1657 IEM_MC_PREPARE_AVX_USAGE();
1658
1659 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1660 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);
1661
1662 IEM_MC_ADVANCE_RIP();
1663 IEM_MC_END();
1664 }
1665 }
1666 return VINF_SUCCESS;
1667}
1668
1669
1670/* Opcode VEX.F2.0F 0x16 - invalid */
1671
1672
1673/**
1674 * @opcode 0x17
1675 * @opcodesub !11 mr/reg
1676 * @oppfx none
1677 * @opcpuid avx
1678 * @opgroup og_avx_simdfp_datamove
1679 * @opxcpttype 5
1680 */
1681FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1682{
1683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1684 if (IEM_IS_MODRM_MEM_MODE(bRm))
1685 {
1686 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1687
1688 IEM_MC_BEGIN(0, 2);
1689 IEM_MC_LOCAL(uint64_t, uSrc);
1690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1691
1692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1693 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1694 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1695 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1696
1697 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1698 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1699
1700 IEM_MC_ADVANCE_RIP();
1701 IEM_MC_END();
1702 return VINF_SUCCESS;
1703 }
1704
1705 /**
1706 * @opdone
1707 * @opmnemonic udvex0f17m3
1708 * @opcode 0x17
1709 * @opcodesub 11 mr/reg
1710 * @oppfx none
1711 * @opunused immediate
1712 * @opcpuid avx
1713 * @optest ->
1714 */
1715 return IEMOP_RAISE_INVALID_OPCODE();
1716}
1717
1718
1719/**
1720 * @opcode 0x17
1721 * @opcodesub !11 mr/reg
1722 * @oppfx 0x66
1723 * @opcpuid avx
1724 * @opgroup og_avx_pcksclr_datamove
1725 * @opxcpttype 5
1726 */
1727FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1728{
1729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1730 if (IEM_IS_MODRM_MEM_MODE(bRm))
1731 {
1732 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1733 IEM_MC_BEGIN(0, 2);
1734 IEM_MC_LOCAL(uint64_t, uSrc);
1735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1736
1737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1738 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1739 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1740 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1741
1742 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1743 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1744
1745 IEM_MC_ADVANCE_RIP();
1746 IEM_MC_END();
1747 return VINF_SUCCESS;
1748 }
1749
1750 /**
1751 * @opdone
1752 * @opmnemonic udvex660f17m3
1753 * @opcode 0x17
1754 * @opcodesub 11 mr/reg
1755 * @oppfx 0x66
1756 * @opunused immediate
1757 * @opcpuid avx
1758 * @optest ->
1759 */
1760 return IEMOP_RAISE_INVALID_OPCODE();
1761}
1762
1763
1764/* Opcode VEX.F3.0F 0x17 - invalid */
1765/* Opcode VEX.F2.0F 0x17 - invalid */
1766
1767
1768/* Opcode VEX.0F 0x18 - invalid */
1769/* Opcode VEX.0F 0x19 - invalid */
1770/* Opcode VEX.0F 0x1a - invalid */
1771/* Opcode VEX.0F 0x1b - invalid */
1772/* Opcode VEX.0F 0x1c - invalid */
1773/* Opcode VEX.0F 0x1d - invalid */
1774/* Opcode VEX.0F 0x1e - invalid */
1775/* Opcode VEX.0F 0x1f - invalid */
1776
1777/* Opcode VEX.0F 0x20 - invalid */
1778/* Opcode VEX.0F 0x21 - invalid */
1779/* Opcode VEX.0F 0x22 - invalid */
1780/* Opcode VEX.0F 0x23 - invalid */
1781/* Opcode VEX.0F 0x24 - invalid */
1782/* Opcode VEX.0F 0x25 - invalid */
1783/* Opcode VEX.0F 0x26 - invalid */
1784/* Opcode VEX.0F 0x27 - invalid */
1785
1786/**
1787 * @opcode 0x28
1788 * @oppfx none
1789 * @opcpuid avx
1790 * @opgroup og_avx_pcksclr_datamove
1791 * @opxcpttype 1
1792 * @optest op1=1 op2=2 -> op1=2
1793 * @optest op1=0 op2=-42 -> op1=-42
1794 * @note Almost identical to vmovapd.
1795 */
1796FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1797{
1798 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1800 Assert(pVCpu->iem.s.uVexLength <= 1);
1801 if (IEM_IS_MODRM_REG_MODE(bRm))
1802 {
1803 /*
1804 * Register, register.
1805 */
1806 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1807 IEM_MC_BEGIN(1, 0);
1808
1809 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1810 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1811 if (pVCpu->iem.s.uVexLength == 0)
1812 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1813 IEM_GET_MODRM_RM(pVCpu, bRm));
1814 else
1815 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1816 IEM_GET_MODRM_RM(pVCpu, bRm));
1817 IEM_MC_ADVANCE_RIP();
1818 IEM_MC_END();
1819 }
1820 else
1821 {
1822 /*
1823 * Register, memory.
1824 */
1825 if (pVCpu->iem.s.uVexLength == 0)
1826 {
1827 IEM_MC_BEGIN(0, 2);
1828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1829 IEM_MC_LOCAL(RTUINT128U, uSrc);
1830
1831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1832 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1833 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1834 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1835
1836 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1837 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1838
1839 IEM_MC_ADVANCE_RIP();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 IEM_MC_BEGIN(0, 2);
1845 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847
1848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1849 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1850 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1851 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1852
1853 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1854 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1855
1856 IEM_MC_ADVANCE_RIP();
1857 IEM_MC_END();
1858 }
1859 }
1860 return VINF_SUCCESS;
1861}
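
/*
 * A minimal sketch (not part of IEM, an assumption spelled out for clarity) of
 * the exception type 1 alignment rule the IEM_MC_FETCH_MEM_U128_ALIGN_SSE and
 * IEM_MC_FETCH_MEM_U256_ALIGN_AVX fetchers above enforce: the effective
 * address must be naturally aligned to the access size, 16 bytes for xmm and
 * 32 bytes for ymm, otherwise #GP(0) is raised.
 */
//static bool iemIsVmovAccessAlignedSketch(RTGCPTR GCPtrEff, uint8_t cbAccess)
//{
//    /* cbAccess is 16 or 32; both are powers of two, so masking works. */
//    return !(GCPtrEff & (RTGCPTR)(cbAccess - 1));
//}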
1862
1863
1864/**
1865 * @opcode 0x28
1866 * @oppfx 0x66
1867 * @opcpuid avx
1868 * @opgroup og_avx_pcksclr_datamove
1869 * @opxcpttype 1
1870 * @optest op1=1 op2=2 -> op1=2
1871 * @optest op1=0 op2=-42 -> op1=-42
1872 * @note Almost identical to vmovaps.
1873 */
1874FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1875{
1876 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1878 Assert(pVCpu->iem.s.uVexLength <= 1);
1879 if (IEM_IS_MODRM_REG_MODE(bRm))
1880 {
1881 /*
1882 * Register, register.
1883 */
1884 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1885 IEM_MC_BEGIN(1, 0);
1886
1887 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1888 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1889 if (pVCpu->iem.s.uVexLength == 0)
1890 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1891 IEM_GET_MODRM_RM(pVCpu, bRm));
1892 else
1893 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1894 IEM_GET_MODRM_RM(pVCpu, bRm));
1895 IEM_MC_ADVANCE_RIP();
1896 IEM_MC_END();
1897 }
1898 else
1899 {
1900 /*
1901 * Register, memory.
1902 */
1903 if (pVCpu->iem.s.uVexLength == 0)
1904 {
1905 IEM_MC_BEGIN(0, 2);
1906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1907 IEM_MC_LOCAL(RTUINT128U, uSrc);
1908
1909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1910 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1911 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1912 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1913
1914 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1915 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1916
1917 IEM_MC_ADVANCE_RIP();
1918 IEM_MC_END();
1919 }
1920 else
1921 {
1922 IEM_MC_BEGIN(0, 2);
1923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1924 IEM_MC_LOCAL(RTUINT256U, uSrc);
1925
1926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1927 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1928 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1929 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1930
1931 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1932 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1933
1934 IEM_MC_ADVANCE_RIP();
1935 IEM_MC_END();
1936 }
1937 }
1938 return VINF_SUCCESS;
1939}
1940
1941/**
1942 * @opmnemonic udvexf30f28
1943 * @opcode 0x28
1944 * @oppfx 0xf3
1945 * @opunused vex.modrm
1946 * @opcpuid avx
1947 * @optest ->
1948 * @opdone
1949 */
1950
1951/**
1952 * @opmnemonic udvexf20f28
1953 * @opcode 0x28
1954 * @oppfx 0xf2
1955 * @opunused vex.modrm
1956 * @opcpuid avx
1957 * @optest ->
1958 * @opdone
1959 */
1960
1961/**
1962 * @opcode 0x29
1963 * @oppfx none
1964 * @opcpuid avx
1965 * @opgroup og_avx_pcksclr_datamove
1966 * @opxcpttype 1
1967 * @optest op1=1 op2=2 -> op1=2
1968 * @optest op1=0 op2=-42 -> op1=-42
1969 * @note Almost identical to vmovapd.
1970 */
1971FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1972{
1973 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1975 Assert(pVCpu->iem.s.uVexLength <= 1);
1976 if (IEM_IS_MODRM_REG_MODE(bRm))
1977 {
1978 /*
1979 * Register, register.
1980 */
1981 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1982 IEM_MC_BEGIN(1, 0);
1983
1984 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1985 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1986 if (pVCpu->iem.s.uVexLength == 0)
1987 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1988 IEM_GET_MODRM_REG(pVCpu, bRm));
1989 else
1990 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1991 IEM_GET_MODRM_REG(pVCpu, bRm));
1992 IEM_MC_ADVANCE_RIP();
1993 IEM_MC_END();
1994 }
1995 else
1996 {
1997 /*
1998 * Register, memory.
1999 */
2000 if (pVCpu->iem.s.uVexLength == 0)
2001 {
2002 IEM_MC_BEGIN(0, 2);
2003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2004 IEM_MC_LOCAL(RTUINT128U, uSrc);
2005
2006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2007 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2008 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2009 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2010
2011 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2012 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2013
2014 IEM_MC_ADVANCE_RIP();
2015 IEM_MC_END();
2016 }
2017 else
2018 {
2019 IEM_MC_BEGIN(0, 2);
2020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2021 IEM_MC_LOCAL(RTUINT256U, uSrc);
2022
2023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2024 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2025 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2026 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2027
2028 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2029 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2030
2031 IEM_MC_ADVANCE_RIP();
2032 IEM_MC_END();
2033 }
2034 }
2035 return VINF_SUCCESS;
2036}
2037
2038/**
2039 * @opcode 0x29
2040 * @oppfx 0x66
2041 * @opcpuid avx
2042 * @opgroup og_avx_pcksclr_datamove
2043 * @opxcpttype 1
2044 * @optest op1=1 op2=2 -> op1=2
2045 * @optest op1=0 op2=-42 -> op1=-42
2046 * @note Almost identical to vmovaps.
2047 */
2048FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2049{
2050 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2051 Assert(pVCpu->iem.s.uVexLength <= 1);
2052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2053 if (IEM_IS_MODRM_REG_MODE(bRm))
2054 {
2055 /*
2056 * Register, register.
2057 */
2058 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2059 IEM_MC_BEGIN(1, 0);
2060
2061 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2062 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2063 if (pVCpu->iem.s.uVexLength == 0)
2064 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2065 IEM_GET_MODRM_REG(pVCpu, bRm));
2066 else
2067 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2068 IEM_GET_MODRM_REG(pVCpu, bRm));
2069 IEM_MC_ADVANCE_RIP();
2070 IEM_MC_END();
2071 }
2072 else
2073 {
2074 /*
2075 * Register, memory.
2076 */
2077 if (pVCpu->iem.s.uVexLength == 0)
2078 {
2079 IEM_MC_BEGIN(0, 2);
2080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2081 IEM_MC_LOCAL(RTUINT128U, uSrc);
2082
2083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2084 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2085 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2086 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2087
2088 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2089 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2090
2091 IEM_MC_ADVANCE_RIP();
2092 IEM_MC_END();
2093 }
2094 else
2095 {
2096 IEM_MC_BEGIN(0, 2);
2097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2098 IEM_MC_LOCAL(RTUINT256U, uSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2102 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2104
2105 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2106 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2107
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 }
2112 return VINF_SUCCESS;
2113}
2114
2115
2116/**
2117 * @opmnemonic udvexf30f29
2118 * @opcode 0x29
2119 * @oppfx 0xf3
2120 * @opunused vex.modrm
2121 * @opcpuid avx
2122 * @optest ->
2123 * @opdone
2124 */
2125
2126/**
2127 * @opmnemonic udvexf20f29
2128 * @opcode 0x29
2129 * @oppfx 0xf2
2130 * @opunused vex.modrm
2131 * @opcpuid avx
2132 * @optest ->
2133 * @opdone
2134 */
2135
2136
2137/** Opcode VEX.0F 0x2a - invalid */
2138/** Opcode VEX.66.0F 0x2a - invalid */
2139/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2140FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2141/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2142FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2143
2144
2145/**
2146 * @opcode 0x2b
2147 * @opcodesub !11 mr/reg
2148 * @oppfx none
2149 * @opcpuid avx
2150 * @opgroup og_avx_cachect
2151 * @opxcpttype 1
2152 * @optest op1=1 op2=2 -> op1=2
2153 * @optest op1=0 op2=-42 -> op1=-42
2154 * @note Identical implementation to vmovntpd.
2155 */
2156FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2157{
2158 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2159 Assert(pVCpu->iem.s.uVexLength <= 1);
2160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2161 if (IEM_IS_MODRM_MEM_MODE(bRm))
2162 {
2163 /*
2164 * memory, register.
2165 */
2166 if (pVCpu->iem.s.uVexLength == 0)
2167 {
2168 IEM_MC_BEGIN(0, 2);
2169 IEM_MC_LOCAL(RTUINT128U, uSrc);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171
2172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2173 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2174 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2175 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2176
2177 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2178 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2179
2180 IEM_MC_ADVANCE_RIP();
2181 IEM_MC_END();
2182 }
2183 else
2184 {
2185 IEM_MC_BEGIN(0, 2);
2186 IEM_MC_LOCAL(RTUINT256U, uSrc);
2187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2188
2189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2193
2194 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2195 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2196
2197 IEM_MC_ADVANCE_RIP();
2198 IEM_MC_END();
2199 }
2200 }
2201 /* The register, register encoding is invalid. */
2202 else
2203 return IEMOP_RAISE_INVALID_OPCODE();
2204 return VINF_SUCCESS;
2205}
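
/* Note: under emulation the non-temporal hint carries no observable effect;
   the sequence above is an ordinary aligned store. */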
2206
2207/**
2208 * @opcode 0x2b
2209 * @opcodesub !11 mr/reg
2210 * @oppfx 0x66
2211 * @opcpuid avx
2212 * @opgroup og_avx_cachect
2213 * @opxcpttype 1
2214 * @optest op1=1 op2=2 -> op1=2
2215 * @optest op1=0 op2=-42 -> op1=-42
2216 * @note Identical implementation to vmovntps.
2217 */
2218FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2219{
2220 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2223 if (IEM_IS_MODRM_MEM_MODE(bRm))
2224 {
2225 /*
2226 * memory, register.
2227 */
2228 if (pVCpu->iem.s.uVexLength == 0)
2229 {
2230 IEM_MC_BEGIN(0, 2);
2231 IEM_MC_LOCAL(RTUINT128U, uSrc);
2232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2233
2234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2235 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2236 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2237 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2238
2239 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2240 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2241
2242 IEM_MC_ADVANCE_RIP();
2243 IEM_MC_END();
2244 }
2245 else
2246 {
2247 IEM_MC_BEGIN(0, 2);
2248 IEM_MC_LOCAL(RTUINT256U, uSrc);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250
2251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2252 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2253 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2254 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2255
2256 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2257 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2258
2259 IEM_MC_ADVANCE_RIP();
2260 IEM_MC_END();
2261 }
2262 }
2263 /* The register, register encoding is invalid. */
2264 else
2265 return IEMOP_RAISE_INVALID_OPCODE();
2266 return VINF_SUCCESS;
2267}
2268
2269/**
2270 * @opmnemonic udvexf30f2b
2271 * @opcode 0x2b
2272 * @oppfx 0xf3
2273 * @opunused vex.modrm
2274 * @opcpuid avx
2275 * @optest ->
2276 * @opdone
2277 */
2278
2279/**
2280 * @opmnemonic udvexf20f2b
2281 * @opcode 0x2b
2282 * @oppfx 0xf2
2283 * @opunused vex.modrm
2284 * @opcpuid avx
2285 * @optest ->
2286 * @opdone
2287 */
2288
2289
2290/* Opcode VEX.0F 0x2c - invalid */
2291/* Opcode VEX.66.0F 0x2c - invalid */
2292/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2293FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2294/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2295FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2296
2297/* Opcode VEX.0F 0x2d - invalid */
2298/* Opcode VEX.66.0F 0x2d - invalid */
2299/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2300FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2301/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2302FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2303
2304
2305/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
2306FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2307{
2308 IEMOP_MNEMONIC2(RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2310 if (IEM_IS_MODRM_REG_MODE(bRm))
2311 {
2312 /*
2313 * Register, register.
2314 */
2315 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2316 IEM_MC_BEGIN(4, 1);
2317 IEM_MC_LOCAL(uint32_t, fEFlags);
2318 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2319 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2320 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2321 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2323 IEM_MC_PREPARE_AVX_USAGE();
2324 IEM_MC_FETCH_EFLAGS(fEFlags);
2325 IEM_MC_REF_MXCSR(pfMxcsr);
2326 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2327 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2328 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2329 pfMxcsr, pEFlags, puSrc1, puSrc2);
2330 IEM_MC_IF_MXCSR_XCPT_PENDING()
2331 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2332 IEM_MC_ELSE()
2333 IEM_MC_COMMIT_EFLAGS(fEFlags);
2334 IEM_MC_ENDIF();
2335
2336 IEM_MC_ADVANCE_RIP();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 /*
2342 * Register, memory.
2343 */
2344 IEM_MC_BEGIN(4, 3);
2345 IEM_MC_LOCAL(uint32_t, fEFlags);
2346 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2347 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2348 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2349 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2350 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2352
2353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2354 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2355 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2356 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2357
2358 IEM_MC_PREPARE_AVX_USAGE();
2359 IEM_MC_FETCH_EFLAGS(fEFlags);
2360 IEM_MC_REF_MXCSR(pfMxcsr);
2361 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2362 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2363 pfMxcsr, pEFlags, puSrc1, puSrc2);
2364 IEM_MC_IF_MXCSR_XCPT_PENDING()
2365 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2366 IEM_MC_ELSE()
2367 IEM_MC_COMMIT_EFLAGS(fEFlags);
2368 IEM_MC_ENDIF();
2369
2370 IEM_MC_ADVANCE_RIP();
2371 IEM_MC_END();
2372 }
2373 return VINF_SUCCESS;
2374}
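
/*
 * A sketch of the architectural UCOMISS flag mapping the AIMPL workers above
 * are expected to produce (this restates the SDM behaviour, it is not a quote
 * of the worker implementation); isunordered() is the C99 macro from math.h.
 */
//static uint32_t iemUComIssEFlagsSketch(float r32Src1, float r32Src2, uint32_t fEFlags)
//{
//    fEFlags &= ~(uint32_t)(X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_OF | X86_EFL_SF | X86_EFL_AF);
//    if (isunordered(r32Src1, r32Src2))
//        fEFlags |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;    /* unordered (QNaNs do not fault) */
//    else if (r32Src1 < r32Src2)
//        fEFlags |= X86_EFL_CF;                              /* less than */
//    else if (r32Src1 == r32Src2)
//        fEFlags |= X86_EFL_ZF;                              /* equal */
//    /* greater than: ZF=PF=CF=0 */
//    return fEFlags;
//}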
2375
2376
2377/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
2378FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2379{
2380 IEMOP_MNEMONIC2(RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2382 if (IEM_IS_MODRM_REG_MODE(bRm))
2383 {
2384 /*
2385 * Register, register.
2386 */
2387 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2388 IEM_MC_BEGIN(4, 1);
2389 IEM_MC_LOCAL(uint32_t, fEFlags);
2390 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2391 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2392 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2393 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2394 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2395 IEM_MC_PREPARE_AVX_USAGE();
2396 IEM_MC_FETCH_EFLAGS(fEFlags);
2397 IEM_MC_REF_MXCSR(pfMxcsr);
2398 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2399 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2400 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2401 pfMxcsr, pEFlags, puSrc1, puSrc2);
2402 IEM_MC_IF_MXCSR_XCPT_PENDING()
2403 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2404 IEM_MC_ELSE()
2405 IEM_MC_COMMIT_EFLAGS(fEFlags);
2406 IEM_MC_ENDIF();
2407
2408 IEM_MC_ADVANCE_RIP();
2409 IEM_MC_END();
2410 }
2411 else
2412 {
2413 /*
2414 * Register, memory.
2415 */
2416 IEM_MC_BEGIN(4, 3);
2417 IEM_MC_LOCAL(uint32_t, fEFlags);
2418 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2419 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2420 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2421 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2422 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2424
2425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2426 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2427 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2428 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2429
2430 IEM_MC_PREPARE_AVX_USAGE();
2431 IEM_MC_FETCH_EFLAGS(fEFlags);
2432 IEM_MC_REF_MXCSR(pfMxcsr);
2433 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2434 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2435 pfMxcsr, pEFlags, puSrc1, puSrc2);
2436 IEM_MC_IF_MXCSR_XCPT_PENDING()
2437 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2438 IEM_MC_ELSE()
2439 IEM_MC_COMMIT_EFLAGS(fEFlags);
2440 IEM_MC_ENDIF();
2441
2442 IEM_MC_ADVANCE_RIP();
2443 IEM_MC_END();
2444 }
2445 return VINF_SUCCESS;
2446}
2447
2448
2449/* Opcode VEX.F3.0F 0x2e - invalid */
2450/* Opcode VEX.F2.0F 0x2e - invalid */
2451
2452/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
2453FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2454{
2455 IEMOP_MNEMONIC2(RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2457 if (IEM_IS_MODRM_REG_MODE(bRm))
2458 {
2459 /*
2460 * Register, register.
2461 */
2462 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2463 IEM_MC_BEGIN(4, 1);
2464 IEM_MC_LOCAL(uint32_t, fEFlags);
2465 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2466 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2467 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2468 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2470 IEM_MC_PREPARE_AVX_USAGE();
2471 IEM_MC_FETCH_EFLAGS(fEFlags);
2472 IEM_MC_REF_MXCSR(pfMxcsr);
2473 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2474 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2475 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2476 pfMxcsr, pEFlags, puSrc1, puSrc2);
2477 IEM_MC_IF_MXCSR_XCPT_PENDING()
2478 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2479 IEM_MC_ELSE()
2480 IEM_MC_COMMIT_EFLAGS(fEFlags);
2481 IEM_MC_ENDIF();
2482
2483 IEM_MC_ADVANCE_RIP();
2484 IEM_MC_END();
2485 }
2486 else
2487 {
2488 /*
2489 * Register, memory.
2490 */
2491 IEM_MC_BEGIN(4, 3);
2492 IEM_MC_LOCAL(uint32_t, fEFlags);
2493 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2494 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2495 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2496 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2497 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2499
2500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2501 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2502 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2503 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2504
2505 IEM_MC_PREPARE_AVX_USAGE();
2506 IEM_MC_FETCH_EFLAGS(fEFlags);
2507 IEM_MC_REF_MXCSR(pfMxcsr);
2508 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2509 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2510 pfMxcsr, pEFlags, puSrc1, puSrc2);
2511 IEM_MC_IF_MXCSR_XCPT_PENDING()
2512 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2513 IEM_MC_ELSE()
2514 IEM_MC_COMMIT_EFLAGS(fEFlags);
2515 IEM_MC_ENDIF();
2516
2517 IEM_MC_ADVANCE_RIP();
2518 IEM_MC_END();
2519 }
2520 return VINF_SUCCESS;
2521}
2522
2523
2524/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
2525FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2526{
2527 IEMOP_MNEMONIC2(RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2529 if (IEM_IS_MODRM_REG_MODE(bRm))
2530 {
2531 /*
2532 * Register, register.
2533 */
2534 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2535 IEM_MC_BEGIN(4, 1);
2536 IEM_MC_LOCAL(uint32_t, fEFlags);
2537 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2538 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2539 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2540 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2541 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2542 IEM_MC_PREPARE_AVX_USAGE();
2543 IEM_MC_FETCH_EFLAGS(fEFlags);
2544 IEM_MC_REF_MXCSR(pfMxcsr);
2545 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2546 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2547 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2548 pfMxcsr, pEFlags, puSrc1, puSrc2);
2549 IEM_MC_IF_MXCSR_XCPT_PENDING()
2550 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2551 IEM_MC_ELSE()
2552 IEM_MC_COMMIT_EFLAGS(fEFlags);
2553 IEM_MC_ENDIF();
2554
2555 IEM_MC_ADVANCE_RIP();
2556 IEM_MC_END();
2557 }
2558 else
2559 {
2560 /*
2561 * Register, memory.
2562 */
2563 IEM_MC_BEGIN(4, 3);
2564 IEM_MC_LOCAL(uint32_t, fEFlags);
2565 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2566 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2567 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2568 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2569 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2571
2572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2573 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2574 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2575 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2576
2577 IEM_MC_PREPARE_AVX_USAGE();
2578 IEM_MC_FETCH_EFLAGS(fEFlags);
2579 IEM_MC_REF_MXCSR(pfMxcsr);
2580 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2581 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2582 pfMxcsr, pEFlags, puSrc1, puSrc2);
2583 IEM_MC_IF_MXCSR_XCPT_PENDING()
2584 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2585 IEM_MC_ELSE()
2586 IEM_MC_COMMIT_EFLAGS(fEFlags);
2587 IEM_MC_ENDIF();
2588
2589 IEM_MC_ADVANCE_RIP();
2590 IEM_MC_END();
2591 }
2592 return VINF_SUCCESS;
2593}
2594
2595
2596/* Opcode VEX.F3.0F 0x2f - invalid */
2597/* Opcode VEX.F2.0F 0x2f - invalid */
2598
2599/* Opcode VEX.0F 0x30 - invalid */
2600/* Opcode VEX.0F 0x31 - invalid */
2601/* Opcode VEX.0F 0x32 - invalid */
2602/* Opcode VEX.0F 0x33 - invalid */
2603/* Opcode VEX.0F 0x34 - invalid */
2604/* Opcode VEX.0F 0x35 - invalid */
2605/* Opcode VEX.0F 0x36 - invalid */
2606/* Opcode VEX.0F 0x37 - invalid */
2607/* Opcode VEX.0F 0x38 - invalid */
2608/* Opcode VEX.0F 0x39 - invalid */
2609/* Opcode VEX.0F 0x3a - invalid */
2610/* Opcode VEX.0F 0x3b - invalid */
2611/* Opcode VEX.0F 0x3c - invalid */
2612/* Opcode VEX.0F 0x3d - invalid */
2613/* Opcode VEX.0F 0x3e - invalid */
2614/* Opcode VEX.0F 0x3f - invalid */
2615/* Opcode VEX.0F 0x40 - invalid */
2616/* Opcode VEX.0F 0x41 - invalid */
2617/* Opcode VEX.0F 0x42 - invalid */
2618/* Opcode VEX.0F 0x43 - invalid */
2619/* Opcode VEX.0F 0x44 - invalid */
2620/* Opcode VEX.0F 0x45 - invalid */
2621/* Opcode VEX.0F 0x46 - invalid */
2622/* Opcode VEX.0F 0x47 - invalid */
2623/* Opcode VEX.0F 0x48 - invalid */
2624/* Opcode VEX.0F 0x49 - invalid */
2625/* Opcode VEX.0F 0x4a - invalid */
2626/* Opcode VEX.0F 0x4b - invalid */
2627/* Opcode VEX.0F 0x4c - invalid */
2628/* Opcode VEX.0F 0x4d - invalid */
2629/* Opcode VEX.0F 0x4e - invalid */
2630/* Opcode VEX.0F 0x4f - invalid */
2631
2632
2633/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2634FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2635{
2636 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2637 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2638 if (IEM_IS_MODRM_REG_MODE(bRm))
2639 {
2640 /*
2641 * Register, register.
2642 */
2643 if (pVCpu->iem.s.uVexLength == 0)
2644 {
2645 IEMOP_HLP_DONE_VEX_DECODING();
2646 IEM_MC_BEGIN(2, 1);
2647 IEM_MC_LOCAL(uint8_t, u8Dst);
2648 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2649 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2650 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2651 IEM_MC_PREPARE_AVX_USAGE();
2652 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2653 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2654 pu8Dst, puSrc);
2655 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2656 IEM_MC_ADVANCE_RIP();
2657 IEM_MC_END();
2658 }
2659 else
2660 {
2661 IEMOP_HLP_DONE_VEX_DECODING();
2662 IEM_MC_BEGIN(2, 2);
2663 IEM_MC_LOCAL(uint8_t, u8Dst);
2664 IEM_MC_LOCAL(RTUINT256U, uSrc);
2665 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2666 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2667
2668 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2669 IEM_MC_PREPARE_AVX_USAGE();
2670 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2671 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2672 pu8Dst, puSrc);
2673 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2674 IEM_MC_ADVANCE_RIP();
2675 IEM_MC_END();
2676 }
2677 return VINF_SUCCESS;
2678 }
2679
2680 /* No memory operand. */
2681 return IEMOP_RAISE_INVALID_OPCODE();
2682}
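
/*
 * An illustrative sketch (not the IEM worker implementation) of the 128-bit
 * MOVMSKPS semantics the vmovmskps AIMPL functions above implement:
 * destination bit i receives the sign bit of packed single i, and all higher
 * destination bits are zero.
 */
//static uint8_t iemMovMskPsU128Sketch(PCRTUINT128U puSrc)
//{
//    uint8_t bDst = 0;
//    for (unsigned i = 0; i < 4; i++)
//        bDst |= (uint8_t)(((puSrc->au32[i] >> 31) & 1) << i);
//    return bDst;
//}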
2683
2684
2685/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy, Upd */
2686FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2687{
2689 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2690 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2691 if (IEM_IS_MODRM_REG_MODE(bRm))
2692 {
2693 /*
2694 * Register, register.
2695 */
2696 if (pVCpu->iem.s.uVexLength == 0)
2697 {
2698 IEMOP_HLP_DONE_VEX_DECODING();
2699 IEM_MC_BEGIN(2, 1);
2700 IEM_MC_LOCAL(uint8_t, u8Dst);
2701 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2702 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2703 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2704 IEM_MC_PREPARE_AVX_USAGE();
2705 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2706 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2707 pu8Dst, puSrc);
2708 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2709 IEM_MC_ADVANCE_RIP();
2710 IEM_MC_END();
2711 }
2712 else
2713 {
2714 IEMOP_HLP_DONE_VEX_DECODING();
2715 IEM_MC_BEGIN(2, 2);
2716 IEM_MC_LOCAL(uint8_t, u8Dst);
2717 IEM_MC_LOCAL(RTUINT256U, uSrc);
2718 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2719 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2720
2721 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2722 IEM_MC_PREPARE_AVX_USAGE();
2723 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2724 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2725 pu8Dst, puSrc);
2726 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2727 IEM_MC_ADVANCE_RIP();
2728 IEM_MC_END();
2729 }
2730 return VINF_SUCCESS;
2731 }
2732
2733 /* No memory operand. */
2734 return IEMOP_RAISE_INVALID_OPCODE();
2735}
2737
2738
2739/* Opcode VEX.F3.0F 0x50 - invalid */
2740/* Opcode VEX.F2.0F 0x50 - invalid */
2741
2742/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2743FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2744/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2745FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2746/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2747FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2748/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2749FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2750
2751/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2752FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2753/* Opcode VEX.66.0F 0x52 - invalid */
2754/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2755FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2756/* Opcode VEX.F2.0F 0x52 - invalid */
2757
2758/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2759FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2760/* Opcode VEX.66.0F 0x53 - invalid */
2761/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2762FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2763/* Opcode VEX.F2.0F 0x53 - invalid */
2764
2765
2766/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2767FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2768{
2769 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2770 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2771 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2772}
2773
2774
2775/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2776FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2777{
2778 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2779 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2780 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2781}
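
/* Note: the packed single/double AND forms above (and the ANDN/OR/XOR forms
   below) share the integer vpand/vpandn/vpor/vpxor workers; the operations
   are purely bitwise, so only the mnemonic and decoding differ. */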
2782
2783
2784/* Opcode VEX.F3.0F 0x54 - invalid */
2785/* Opcode VEX.F2.0F 0x54 - invalid */
2786
2787
2788/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2789FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2790{
2791 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2792 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2793 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2794}
2795
2796
2797/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2798FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2799{
2800 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2801 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2802 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2803}
2804
2805
2806/* Opcode VEX.F3.0F 0x55 - invalid */
2807/* Opcode VEX.F2.0F 0x55 - invalid */
2808
2809/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2810FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2811{
2812 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2813 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2814 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2815}
2816
2817
2818/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2819FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2820{
2821 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2822 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2823 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2824}
2825
2826
2827/* Opcode VEX.F3.0F 0x56 - invalid */
2828/* Opcode VEX.F2.0F 0x56 - invalid */
2829
2830
2831/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2832FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2833{
2834 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2835 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2836 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2837}
2838
2839
2840/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2841FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2842{
2843 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2844 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2845 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2846}
2847
2848
2849/* Opcode VEX.F3.0F 0x57 - invalid */
2850/* Opcode VEX.F2.0F 0x57 - invalid */
2851
2852/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2853FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2854/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2855FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2856/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2857FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2858/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2859FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2860
2861/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2862FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2863/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2864FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2865/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2866FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2867/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2868FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2869
2870/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2871FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2872/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2873FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2874/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2875FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2876/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2877FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2878
2879/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2880FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2881/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2882FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2883/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2884FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2885/* Opcode VEX.F2.0F 0x5b - invalid */
2886
2887/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2888FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2889/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2890FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2891/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2892FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2893/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2894FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2895
2896/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2897FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2898/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2899FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2900/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2901FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2902/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2903FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2904
2905/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2906FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2907/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2908FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2909/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2910FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2911/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2912FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2913
2914/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2915FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2916/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2917FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2918/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2919FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2920/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2921FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2922
2923
2924/* Opcode VEX.0F 0x60 - invalid */
2925
2926
2927/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2928FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2929{
2930 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2931 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2932 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2933}
2934
2935
2936/* Opcode VEX.F3.0F 0x60 - invalid */
2937
2938
2939/* Opcode VEX.0F 0x61 - invalid */
2940
2941
2942/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2943FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2944{
2945 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2946 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2947 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2948}
2949
2950
2951/* Opcode VEX.F3.0F 0x61 - invalid */
2952
2953
2954/* Opcode VEX.0F 0x62 - invalid */
2955
2956/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2957FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2958{
2959 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2960 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2961 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2962}
2963
2964
2965/* Opcode VEX.F3.0F 0x62 - invalid */
2966
2967
2968
2969/* Opcode VEX.0F 0x63 - invalid */
2970
2971
2972/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2973FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2974{
2975 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2976 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2977 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2978}
2979
2980
2981/* Opcode VEX.F3.0F 0x63 - invalid */
2982
2983/* Opcode VEX.0F 0x64 - invalid */
2984
2985
2986/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2987FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2988{
2989 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2990 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2991 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2992}
2993
2994
2995/* Opcode VEX.F3.0F 0x64 - invalid */
2996
2997/* Opcode VEX.0F 0x65 - invalid */
2998
2999
3000/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3001FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3002{
3003 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3004 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
3005 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3006}
3007
3008
3009/* Opcode VEX.F3.0F 0x65 - invalid */
3010
3011/* Opcode VEX.0F 0x66 - invalid */
3012
3013
3014/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3015FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3016{
3017 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3018 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
3019 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3020}
3021
3022
3023/* Opcode VEX.F3.0F 0x66 - invalid */
3024
3025/* Opcode VEX.0F 0x67 - invalid */
3026
3027
3028/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3029FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3030{
3031 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3032 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3033 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3034}
3035
3036
3037/* Opcode VEX.F3.0F 0x67 - invalid */
3038
3039
3040///**
3041// * Common worker for SSE2 instructions on the form:
3042// * pxxxx xmm1, xmm2/mem128
3043// *
3044// * The 2nd operand is the second half of a register, which in the memory case
3045// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3046// * where it may read the full 128 bits or only the upper 64 bits.
3047// *
3048// * Exceptions type 4.
3049// */
3050//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3051//{
3052// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3053// if (IEM_IS_MODRM_REG_MODE(bRm))
3054// {
3055// /*
3056// * Register, register.
3057// */
3058// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3059// IEM_MC_BEGIN(2, 0);
3060// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3061// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3062// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3063// IEM_MC_PREPARE_SSE_USAGE();
3064// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3065// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3066// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3067// IEM_MC_ADVANCE_RIP();
3068// IEM_MC_END();
3069// }
3070// else
3071// {
3072// /*
3073// * Register, memory.
3074// */
3075// IEM_MC_BEGIN(2, 2);
3076// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3077// IEM_MC_LOCAL(RTUINT128U, uSrc);
3078// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3079// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3080//
3081// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3082// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3083// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3084// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3085//
3086// IEM_MC_PREPARE_SSE_USAGE();
3087// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3088// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3089//
3090// IEM_MC_ADVANCE_RIP();
3091// IEM_MC_END();
3092// }
3093// return VINF_SUCCESS;
3094//}
3095
3096
3097/* Opcode VEX.0F 0x68 - invalid */
3098
3099/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3100FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3101{
3102 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3103 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3104 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3105}
3106
3107
3108/* Opcode VEX.F3.0F 0x68 - invalid */
3109
3110
3111/* Opcode VEX.0F 0x69 - invalid */
3112
3113
3114/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3115FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3116{
3117 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3118 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3119 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3120}
3121
3122
3123/* Opcode VEX.F3.0F 0x69 - invalid */
3124
3125
3126/* Opcode VEX.0F 0x6a - invalid */
3127
3128
3129/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3130FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3131{
3132 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3133 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3134 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3135}
3136
3137
3138/* Opcode VEX.F3.0F 0x6a - invalid */
3139
3140
3141/* Opcode VEX.0F 0x6b - invalid */
3142
3143
3144/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3145FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3146{
3147 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3148 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3149 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3150}
3151
3152
3153/* Opcode VEX.F3.0F 0x6b - invalid */
3154
3155
3156/* Opcode VEX.0F 0x6c - invalid */
3157
3158
3159/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3160FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3161{
3162 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3163 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3164 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3165}
3166
3167
3168/* Opcode VEX.F3.0F 0x6c - invalid */
3169/* Opcode VEX.F2.0F 0x6c - invalid */
3170
3171
3172/* Opcode VEX.0F 0x6d - invalid */
3173
3174
3175/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3176FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3177{
3178 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3179 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3180 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3181}
3182
3183
3184/* Opcode VEX.F3.0F 0x6d - invalid */
3185
3186
3187/* Opcode VEX.0F 0x6e - invalid */
3188
3189FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3190{
3191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3192 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3193 {
3194 /**
3195 * @opcode 0x6e
3196 * @opcodesub rex.w=1
3197 * @oppfx 0x66
3198 * @opcpuid avx
3199 * @opgroup og_avx_simdint_datamove
3200 * @opxcpttype 5
3201 * @optest 64-bit / op1=1 op2=2 -> op1=2
3202 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3203 */
3204 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3205 if (IEM_IS_MODRM_REG_MODE(bRm))
3206 {
3207 /* XMM, greg64 */
3208 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3209 IEM_MC_BEGIN(0, 1);
3210 IEM_MC_LOCAL(uint64_t, u64Tmp);
3211
3212 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3213 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3214
3215 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3216 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3217
3218 IEM_MC_ADVANCE_RIP();
3219 IEM_MC_END();
3220 }
3221 else
3222 {
3223 /* XMM, [mem64] */
3224 IEM_MC_BEGIN(0, 2);
3225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3226 IEM_MC_LOCAL(uint64_t, u64Tmp);
3227
3228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3229 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3232
3233 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3234 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3235
3236 IEM_MC_ADVANCE_RIP();
3237 IEM_MC_END();
3238 }
3239 }
3240 else
3241 {
3242 /**
3243 * @opdone
3244 * @opcode 0x6e
3245 * @opcodesub rex.w=0
3246 * @oppfx 0x66
3247 * @opcpuid avx
3248 * @opgroup og_avx_simdint_datamove
3249 * @opxcpttype 5
3250 * @opfunction iemOp_vmovd_q_Vy_Ey
3251 * @optest op1=1 op2=2 -> op1=2
3252 * @optest op1=0 op2=-42 -> op1=-42
3253 */
3254 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3255 if (IEM_IS_MODRM_REG_MODE(bRm))
3256 {
3257 /* XMM, greg32 */
3258 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3259 IEM_MC_BEGIN(0, 1);
3260 IEM_MC_LOCAL(uint32_t, u32Tmp);
3261
3262 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3263 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3264
3265 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3266 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3267
3268 IEM_MC_ADVANCE_RIP();
3269 IEM_MC_END();
3270 }
3271 else
3272 {
3273 /* XMM, [mem32] */
3274 IEM_MC_BEGIN(0, 2);
3275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3276 IEM_MC_LOCAL(uint32_t, u32Tmp);
3277
3278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3279 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3280 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3281 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3282
3283 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3284 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3285
3286 IEM_MC_ADVANCE_RIP();
3287 IEM_MC_END();
3288 }
3289 }
3290 return VINF_SUCCESS;
3291}
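
/*
 * A sketch of the ZX_VLMAX store semantics used above (an assumption spelled
 * out for clarity, not the IEM_MC macro implementation, and shown for a
 * 256-bit register file): the low qword (or dword) is written and everything
 * above it is zeroed up to the maximum vector length, as VEX-encoded moves
 * always do.
 */
//static void iemStoreYRegU64ZxVlmaxSketch(PRTUINT256U puYRegDst, uint64_t u64Value)
//{
//    puYRegDst->au64[0] = u64Value;
//    puYRegDst->au64[1] = 0;     /* upper half of the xmm register */
//    puYRegDst->au64[2] = 0;     /* high ymm lane */
//    puYRegDst->au64[3] = 0;
//}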
3292
3293
3294/* Opcode VEX.F3.0F 0x6e - invalid */
3295
3296
3297/* Opcode VEX.0F 0x6f - invalid */
3298
3299/**
3300 * @opcode 0x6f
3301 * @oppfx 0x66
3302 * @opcpuid avx
3303 * @opgroup og_avx_simdint_datamove
3304 * @opxcpttype 1
3305 * @optest op1=1 op2=2 -> op1=2
3306 * @optest op1=0 op2=-42 -> op1=-42
3307 */
3308FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3309{
3310 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3311 Assert(pVCpu->iem.s.uVexLength <= 1);
3312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3313 if (IEM_IS_MODRM_REG_MODE(bRm))
3314 {
3315 /*
3316 * Register, register.
3317 */
3318 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3319 IEM_MC_BEGIN(0, 0);
3320
3321 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3322 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3323 if (pVCpu->iem.s.uVexLength == 0)
3324 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3325 IEM_GET_MODRM_RM(pVCpu, bRm));
3326 else
3327 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3328 IEM_GET_MODRM_RM(pVCpu, bRm));
3329 IEM_MC_ADVANCE_RIP();
3330 IEM_MC_END();
3331 }
3332 else if (pVCpu->iem.s.uVexLength == 0)
3333 {
3334 /*
3335 * Register, memory128.
3336 */
3337 IEM_MC_BEGIN(0, 2);
3338 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3340
3341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3342 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3343 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3344 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3345
3346 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3347 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3348
3349 IEM_MC_ADVANCE_RIP();
3350 IEM_MC_END();
3351 }
3352 else
3353 {
3354 /*
3355 * Register, memory256.
3356 */
3357 IEM_MC_BEGIN(0, 2);
3358 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3360
3361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3362 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3363 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3365
3366 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3367 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3368
3369 IEM_MC_ADVANCE_RIP();
3370 IEM_MC_END();
3371 }
3372 return VINF_SUCCESS;
3373}
3374
3375/**
3376 * @opcode 0x6f
3377 * @oppfx 0xf3
3378 * @opcpuid avx
3379 * @opgroup og_avx_simdint_datamove
3380 * @opxcpttype 4UA
3381 * @optest op1=1 op2=2 -> op1=2
3382 * @optest op1=0 op2=-42 -> op1=-42
3383 */
3384FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3385{
3386 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3387 Assert(pVCpu->iem.s.uVexLength <= 1);
3388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3389 if (IEM_IS_MODRM_REG_MODE(bRm))
3390 {
3391 /*
3392 * Register, register.
3393 */
3394 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3395 IEM_MC_BEGIN(0, 0);
3396
3397 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3398 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3399 if (pVCpu->iem.s.uVexLength == 0)
3400 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3401 IEM_GET_MODRM_RM(pVCpu, bRm));
3402 else
3403 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3404 IEM_GET_MODRM_RM(pVCpu, bRm));
3405 IEM_MC_ADVANCE_RIP();
3406 IEM_MC_END();
3407 }
3408 else if (pVCpu->iem.s.uVexLength == 0)
3409 {
3410 /*
3411 * Register, memory128.
3412 */
3413 IEM_MC_BEGIN(0, 2);
3414 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3416
3417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3418 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3419 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3420 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3421
3422 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3423 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3424
3425 IEM_MC_ADVANCE_RIP();
3426 IEM_MC_END();
3427 }
3428 else
3429 {
3430 /*
3431 * Register, memory256.
3432 */
3433 IEM_MC_BEGIN(0, 2);
3434 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3436
3437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3438 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3439 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3440 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3441
3442 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3443 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3444
3445 IEM_MC_ADVANCE_RIP();
3446 IEM_MC_END();
3447 }
3448 return VINF_SUCCESS;
3449}
3450
3451
3452/* Opcode VEX.0F 0x70 - invalid */
3453
3454
3455/**
3456 * Common worker for AVX/AVX2 instructions on the forms:
3457 * - vpxxx xmm0, xmm2/mem128, imm8
3458 * - vpxxx ymm0, ymm2/mem256, imm8
3459 *
3460 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3461 */
3462FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3463{
3464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3465 if (IEM_IS_MODRM_REG_MODE(bRm))
3466 {
3467 /*
3468 * Register, register.
3469 */
3470 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3471 if (pVCpu->iem.s.uVexLength)
3472 {
3473 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3474 IEM_MC_BEGIN(3, 2);
3475 IEM_MC_LOCAL(RTUINT256U, uDst);
3476 IEM_MC_LOCAL(RTUINT256U, uSrc);
3477 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3478 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3479 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3480 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3481 IEM_MC_PREPARE_AVX_USAGE();
3482 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3483 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3484 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3485 IEM_MC_ADVANCE_RIP();
3486 IEM_MC_END();
3487 }
3488 else
3489 {
3490 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3491 IEM_MC_BEGIN(3, 0);
3492 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3493 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3494 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3495 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3496 IEM_MC_PREPARE_AVX_USAGE();
3497 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3498 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3499 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3500 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3501 IEM_MC_ADVANCE_RIP();
3502 IEM_MC_END();
3503 }
3504 }
3505 else
3506 {
3507 /*
3508 * Register, memory.
3509 */
3510 if (pVCpu->iem.s.uVexLength)
3511 {
3512 IEM_MC_BEGIN(3, 3);
3513 IEM_MC_LOCAL(RTUINT256U, uDst);
3514 IEM_MC_LOCAL(RTUINT256U, uSrc);
3515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3516 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3517 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3518
3519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3520 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3521 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3522 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3523 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3524 IEM_MC_PREPARE_AVX_USAGE();
3525
3526 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3527 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3528 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3529
3530 IEM_MC_ADVANCE_RIP();
3531 IEM_MC_END();
3532 }
3533 else
3534 {
3535 IEM_MC_BEGIN(3, 2);
3536 IEM_MC_LOCAL(RTUINT128U, uSrc);
3537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3538 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3539 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3540
3541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3542 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3543 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3544 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3545 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3546 IEM_MC_PREPARE_AVX_USAGE();
3547
3548 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3549 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3550 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3551 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3552
3553 IEM_MC_ADVANCE_RIP();
3554 IEM_MC_END();
3555 }
3556 }
3557 return VINF_SUCCESS;
3558}
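
/*
 * An illustrative sketch (not the AIMPL implementation) of the dword-select
 * semantics behind the vpshufd flavour of the worker above: destination
 * dword i takes source dword (bImm >> (i * 2)) & 3, and the 256-bit forms
 * repeat this independently per 128-bit lane.
 */
//static void iemPShufDU128Sketch(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
//{
//    RTUINT128U const uSrc = *puSrc; /* copy first, puDst may alias puSrc */
//    for (unsigned i = 0; i < 4; i++)
//        puDst->au32[i] = uSrc.au32[(bImm >> (i * 2)) & 3];
//}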
3559
3560
3561/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3562FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3563{
3564 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3565 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3566 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3568}
3569
3570
3571/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3572FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3573{
3574 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3575 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3576 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3578}
3579
3580
3581/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3582FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3583{
3584 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3585 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3586 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3587}
3588
3589
3590/* Opcode VEX.0F 0x71 11/2 - invalid. */
3591/** Opcode VEX.66.0F 0x71 11/2. */
3592FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
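
///* Sketch only, kept disabled like the vldmxcsr draft further down: a minimal
// * register variant for the 128-bit form.  iemAImpl_vpsrlw_imm_u128 and its
// * (puDst, puSrc, bShift) argument order are assumptions, not a verified
// * helper; a real implementation would also need the VEX.256 path.
// */
//FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
//{
//    uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
//    IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
//    IEM_MC_BEGIN(3, 0);
//    IEM_MC_ARG(PRTUINT128U,   puDst,                 0);
//    IEM_MC_ARG(PCRTUINT128U,  puSrc,                 1);
//    IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
//    IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
//    IEM_MC_PREPARE_AVX_USAGE();
//    IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); /* VVVV is the destination here. */
//    IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
//    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_vpsrlw_imm_u128, puDst, puSrc, bEvilArg);
//    IEM_MC_CLEAR_YREG_128_UP(IEM_GET_EFFECTIVE_VVVV(pVCpu));
//    IEM_MC_ADVANCE_RIP();
//    IEM_MC_END();
//    return VINF_SUCCESS;
//}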
3593
3594/* Opcode VEX.0F 0x71 11/4 - invalid. */
3595/** Opcode VEX.66.0F 0x71 11/4. */
3596FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3597
3598/* Opcode VEX.0F 0x71 11/6 - invalid. */
3599/** Opcode VEX.66.0F 0x71 11/6. */
3600FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3601
3602
3603/**
3604 * VEX Group 12 jump table for register variant.
3605 */
3606IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3607{
3608 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3609 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3610 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3611 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3612 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3613 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3614 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3615 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3616};
3617AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3618
3619
3620/** Opcode VEX.0F 0x71. */
3621FNIEMOP_DEF(iemOp_VGrp12)
3622{
3623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
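 /* The jump table is indexed by ModRM.reg times four plus the SIMD prefix index (columns: none, 066h, 0f3h, 0f2h). */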
3624 if (IEM_IS_MODRM_REG_MODE(bRm))
3625 /* register, register */
3626 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3627 + pVCpu->iem.s.idxPrefix], bRm);
3628 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3629}
3630
3631
3632/* Opcode VEX.0F 0x72 11/2 - invalid. */
3633/** Opcode VEX.66.0F 0x72 11/2. */
3634FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3635
3636/* Opcode VEX.0F 0x72 11/4 - invalid. */
3637/** Opcode VEX.66.0F 0x72 11/4. */
3638FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3639
3640/* Opcode VEX.0F 0x72 11/6 - invalid. */
3641/** Opcode VEX.66.0F 0x72 11/6. */
3642FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3643
3644
3645/**
3646 * VEX Group 13 jump table for register variant.
3647 */
3648IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3649{
3650 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3651 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3652 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3653 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3654 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3655 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3656 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3657 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3658};
3659AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3660
3661/** Opcode VEX.0F 0x72. */
3662FNIEMOP_DEF(iemOp_VGrp13)
3663{
3664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3665 if (IEM_IS_MODRM_REG_MODE(bRm))
3666 /* register, register */
3667 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3668 + pVCpu->iem.s.idxPrefix], bRm);
3669 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3670}
3671
3672
3673/* Opcode VEX.0F 0x73 11/2 - invalid. */
3674/** Opcode VEX.66.0F 0x73 11/2. */
3675FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3676
3677/** Opcode VEX.66.0F 0x73 11/3. */
3678FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3679
3680/* Opcode VEX.0F 0x73 11/6 - invalid. */
3681/** Opcode VEX.66.0F 0x73 11/6. */
3682FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3683
3684/** Opcode VEX.66.0F 0x73 11/7. */
3685FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3686
3687/**
3688 * VEX Group 14 jump table for register variant.
3689 */
3690IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3691{
3692 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3693 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3694 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3695 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3696 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3697 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3698 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3699 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3700};
3701AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3702
3703
3704/** Opcode VEX.0F 0x73. */
3705FNIEMOP_DEF(iemOp_VGrp14)
3706{
3707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3708 if (IEM_IS_MODRM_REG_MODE(bRm))
3709 /* register, register */
3710 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3711 + pVCpu->iem.s.idxPrefix], bRm);
3712 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3713}
3714
3715
3716/* Opcode VEX.0F 0x74 - invalid */
3717
3718
3719/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3720FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3721{
3722 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3723 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
3724 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3725}
3726
3727/* Opcode VEX.F3.0F 0x74 - invalid */
3728/* Opcode VEX.F2.0F 0x74 - invalid */
3729
3730
3731/* Opcode VEX.0F 0x75 - invalid */
3732
3733
3734/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3735FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3736{
3737 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3738 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3739 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3740}
3741
3742
3743/* Opcode VEX.F3.0F 0x75 - invalid */
3744/* Opcode VEX.F2.0F 0x75 - invalid */
3745
3746
3747/* Opcode VEX.0F 0x76 - invalid */
3748
3749
3750/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3751FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3752{
3753 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3754 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3755 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3756}
3757
3758
3759/* Opcode VEX.F3.0F 0x76 - invalid */
3760/* Opcode VEX.F2.0F 0x76 - invalid */
3761
3762
3763/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
3764FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
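
///* Sketch only, not wired up: VEX.L picks between the two forms, so the
// * decoder could defer to C workers.  iemCImpl_vzeroupper and iemCImpl_vzeroall
// * are assumed names here, not verified helpers.
// */
//FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
//{
//    IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
//    if (pVCpu->iem.s.uVexLength == 0)
//        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vzeroupper); /* VEX.128: clears bits 255:128 of all YMM registers. */
//    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vzeroall);       /* VEX.256: zeroes all YMM registers. */
//}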
3765/* Opcode VEX.66.0F 0x77 - invalid */
3766/* Opcode VEX.F3.0F 0x77 - invalid */
3767/* Opcode VEX.F2.0F 0x77 - invalid */
3768
3769/* Opcode VEX.0F 0x78 - invalid */
3770/* Opcode VEX.66.0F 0x78 - invalid */
3771/* Opcode VEX.F3.0F 0x78 - invalid */
3772/* Opcode VEX.F2.0F 0x78 - invalid */
3773
3774/* Opcode VEX.0F 0x79 - invalid */
3775/* Opcode VEX.66.0F 0x79 - invalid */
3776/* Opcode VEX.F3.0F 0x79 - invalid */
3777/* Opcode VEX.F2.0F 0x79 - invalid */
3778
3779/* Opcode VEX.0F 0x7a - invalid */
3780/* Opcode VEX.66.0F 0x7a - invalid */
3781/* Opcode VEX.F3.0F 0x7a - invalid */
3782/* Opcode VEX.F2.0F 0x7a - invalid */
3783
3784/* Opcode VEX.0F 0x7b - invalid */
3785/* Opcode VEX.66.0F 0x7b - invalid */
3786/* Opcode VEX.F3.0F 0x7b - invalid */
3787/* Opcode VEX.F2.0F 0x7b - invalid */
3788
3789/* Opcode VEX.0F 0x7c - invalid */
3790/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
3791FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3792/* Opcode VEX.F3.0F 0x7c - invalid */
3793/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
3794FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3795
3796/* Opcode VEX.0F 0x7d - invalid */
3797/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
3798FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3799/* Opcode VEX.F3.0F 0x7d - invalid */
3800/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
3801FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3802
3803
3804/* Opcode VEX.0F 0x7e - invalid */
3805
3806FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3807{
3808 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3809 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3810 {
3811 /**
3812 * @opcode 0x7e
3813 * @opcodesub rex.w=1
3814 * @oppfx 0x66
3815 * @opcpuid avx
3816 * @opgroup og_avx_simdint_datamov
3817 * @opxcpttype 5
3818 * @optest 64-bit / op1=1 op2=2 -> op1=2
3819 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3820 */
3821 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3822 if (IEM_IS_MODRM_REG_MODE(bRm))
3823 {
3824 /* greg64, XMM */
3825 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3826 IEM_MC_BEGIN(0, 1);
3827 IEM_MC_LOCAL(uint64_t, u64Tmp);
3828
3829 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3830 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3831
3832 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3833 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
3834
3835 IEM_MC_ADVANCE_RIP();
3836 IEM_MC_END();
3837 }
3838 else
3839 {
3840 /* [mem64], XMM */
3841 IEM_MC_BEGIN(0, 2);
3842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3843 IEM_MC_LOCAL(uint64_t, u64Tmp);
3844
3845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3846 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3847 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3848 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3849
3850 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3851 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3852
3853 IEM_MC_ADVANCE_RIP();
3854 IEM_MC_END();
3855 }
3856 }
3857 else
3858 {
3859 /**
3860 * @opdone
3861 * @opcode 0x7e
3862 * @opcodesub rex.w=0
3863 * @oppfx 0x66
3864 * @opcpuid avx
3865 * @opgroup og_avx_simdint_datamov
3866 * @opxcpttype 5
3867 * @opfunction iemOp_vmovd_q_Ey_Vy
3868 * @optest op1=1 op2=2 -> op1=2
3869 * @optest op1=0 op2=-42 -> op1=-42
3870 */
3871 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3872 if (IEM_IS_MODRM_REG_MODE(bRm))
3873 {
3874 /* greg32, XMM */
3875 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3876 IEM_MC_BEGIN(0, 1);
3877 IEM_MC_LOCAL(uint32_t, u32Tmp);
3878
3879 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3880 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3881
3882 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3883 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
3884
3885 IEM_MC_ADVANCE_RIP();
3886 IEM_MC_END();
3887 }
3888 else
3889 {
3890 /* [mem32], XMM */
3891 IEM_MC_BEGIN(0, 2);
3892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3893 IEM_MC_LOCAL(uint32_t, u32Tmp);
3894
3895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3896 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3897 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3898 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3899
3900 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3901 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3902
3903 IEM_MC_ADVANCE_RIP();
3904 IEM_MC_END();
3905 }
3906 }
3907 return VINF_SUCCESS;
3908}
3909
3910/**
3911 * @opcode 0x7e
3912 * @oppfx 0xf3
3913 * @opcpuid avx
3914 * @opgroup og_avx_pcksclr_datamove
3915 * @opxcpttype none
3916 * @optest op1=1 op2=2 -> op1=2
3917 * @optest op1=0 op2=-42 -> op1=-42
3918 */
3919FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
3920{
3921 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3923 if (IEM_IS_MODRM_REG_MODE(bRm))
3924 {
3925 /*
3926 * Register, register.
3927 */
3928 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3929 IEM_MC_BEGIN(0, 0);
3930
3931 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3932 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3933
3934 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3935 IEM_GET_MODRM_RM(pVCpu, bRm));
3936 IEM_MC_ADVANCE_RIP();
3937 IEM_MC_END();
3938 }
3939 else
3940 {
3941 /*
3942 * Memory, register.
3943 */
3944 IEM_MC_BEGIN(0, 2);
3945 IEM_MC_LOCAL(uint64_t, uSrc);
3946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3947
3948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3949 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3950 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3951 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3952
3953 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3954 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3955
3956 IEM_MC_ADVANCE_RIP();
3957 IEM_MC_END();
3958 }
3959 return VINF_SUCCESS;
3960}
3961
3962/* Opcode VEX.F2.0F 0x7e - invalid */
3963
3964
3965/* Opcode VEX.0F 0x7f - invalid */
3966
3967/**
3968 * @opcode 0x7f
3969 * @oppfx 0x66
3970 * @opcpuid avx
3971 * @opgroup og_avx_simdint_datamove
3972 * @opxcpttype 1
3973 * @optest op1=1 op2=2 -> op1=2
3974 * @optest op1=0 op2=-42 -> op1=-42
3975 */
3976FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3977{
3978 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3979 Assert(pVCpu->iem.s.uVexLength <= 1);
3980 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3981 if (IEM_IS_MODRM_REG_MODE(bRm))
3982 {
3983 /*
3984 * Register, register.
3985 */
3986 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3987 IEM_MC_BEGIN(0, 0);
3988
3989 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3990 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3991 if (pVCpu->iem.s.uVexLength == 0)
3992 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3993 IEM_GET_MODRM_REG(pVCpu, bRm));
3994 else
3995 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3996 IEM_GET_MODRM_REG(pVCpu, bRm));
3997 IEM_MC_ADVANCE_RIP();
3998 IEM_MC_END();
3999 }
4000 else if (pVCpu->iem.s.uVexLength == 0)
4001 {
4002 /*
4003 * Register, memory128.
4004 */
4005 IEM_MC_BEGIN(0, 2);
4006 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4008
4009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4010 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4011 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4012 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4013
4014 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4015 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4016
4017 IEM_MC_ADVANCE_RIP();
4018 IEM_MC_END();
4019 }
4020 else
4021 {
4022 /*
4023 * Register, memory256.
4024 */
4025 IEM_MC_BEGIN(0, 2);
4026 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4028
4029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4030 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4031 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4032 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4033
4034 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4035 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4036
4037 IEM_MC_ADVANCE_RIP();
4038 IEM_MC_END();
4039 }
4040 return VINF_SUCCESS;
4041}
4042
4043/**
4044 * @opcode 0x7f
4045 * @oppfx 0xf3
4046 * @opcpuid avx
4047 * @opgroup og_avx_simdint_datamove
4048 * @opxcpttype 4UA
4049 * @optest op1=1 op2=2 -> op1=2
4050 * @optest op1=0 op2=-42 -> op1=-42
4051 */
4052FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4053{
4054 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4055 Assert(pVCpu->iem.s.uVexLength <= 1);
4056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4057 if (IEM_IS_MODRM_REG_MODE(bRm))
4058 {
4059 /*
4060 * Register, register.
4061 */
4062 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4063 IEM_MC_BEGIN(0, 0);
4064
4065 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4066 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4067 if (pVCpu->iem.s.uVexLength == 0)
4068 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4069 IEM_GET_MODRM_REG(pVCpu, bRm));
4070 else
4071 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4072 IEM_GET_MODRM_REG(pVCpu, bRm));
4073 IEM_MC_ADVANCE_RIP();
4074 IEM_MC_END();
4075 }
4076 else if (pVCpu->iem.s.uVexLength == 0)
4077 {
4078 /*
4079 * Register, memory128.
4080 */
4081 IEM_MC_BEGIN(0, 2);
4082 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4084
4085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4086 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4087 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4088 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4089
4090 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4091 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4092
4093 IEM_MC_ADVANCE_RIP();
4094 IEM_MC_END();
4095 }
4096 else
4097 {
4098 /*
4099 * Register, memory256.
4100 */
4101 IEM_MC_BEGIN(0, 2);
4102 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4104
4105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4106 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4107 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4108 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4109
4110 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4111 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4112
4113 IEM_MC_ADVANCE_RIP();
4114 IEM_MC_END();
4115 }
4116 return VINF_SUCCESS;
4117}
4118
4119/* Opcode VEX.F2.0F 0x7f - invalid */
4120
4121
4122/* Opcode VEX.0F 0x80 - invalid */
4123/* Opcode VEX.0F 0x81 - invalid */
4124/* Opcode VEX.0F 0x82 - invalid */
4125/* Opcode VEX.0F 0x83 - invalid */
4126/* Opcode VEX.0F 0x84 - invalid */
4127/* Opcode VEX.0F 0x85 - invalid */
4128/* Opcode VEX.0F 0x86 - invalid */
4129/* Opcode VEX.0F 0x87 - invalid */
4130/* Opcode VEX.0F 0x88 - invalid */
4131/* Opcode VEX.0F 0x89 - invalid */
4132/* Opcode VEX.0F 0x8a - invalid */
4133/* Opcode VEX.0F 0x8b - invalid */
4134/* Opcode VEX.0F 0x8c - invalid */
4135/* Opcode VEX.0F 0x8d - invalid */
4136/* Opcode VEX.0F 0x8e - invalid */
4137/* Opcode VEX.0F 0x8f - invalid */
4138/* Opcode VEX.0F 0x90 - invalid */
4139/* Opcode VEX.0F 0x91 - invalid */
4140/* Opcode VEX.0F 0x92 - invalid */
4141/* Opcode VEX.0F 0x93 - invalid */
4142/* Opcode VEX.0F 0x94 - invalid */
4143/* Opcode VEX.0F 0x95 - invalid */
4144/* Opcode VEX.0F 0x96 - invalid */
4145/* Opcode VEX.0F 0x97 - invalid */
4146/* Opcode VEX.0F 0x98 - invalid */
4147/* Opcode VEX.0F 0x99 - invalid */
4148/* Opcode VEX.0F 0x9a - invalid */
4149/* Opcode VEX.0F 0x9b - invalid */
4150/* Opcode VEX.0F 0x9c - invalid */
4151/* Opcode VEX.0F 0x9d - invalid */
4152/* Opcode VEX.0F 0x9e - invalid */
4153/* Opcode VEX.0F 0x9f - invalid */
4154/* Opcode VEX.0F 0xa0 - invalid */
4155/* Opcode VEX.0F 0xa1 - invalid */
4156/* Opcode VEX.0F 0xa2 - invalid */
4157/* Opcode VEX.0F 0xa3 - invalid */
4158/* Opcode VEX.0F 0xa4 - invalid */
4159/* Opcode VEX.0F 0xa5 - invalid */
4160/* Opcode VEX.0F 0xa6 - invalid */
4161/* Opcode VEX.0F 0xa7 - invalid */
4162/* Opcode VEX.0F 0xa8 - invalid */
4163/* Opcode VEX.0F 0xa9 - invalid */
4164/* Opcode VEX.0F 0xaa - invalid */
4165/* Opcode VEX.0F 0xab - invalid */
4166/* Opcode VEX.0F 0xac - invalid */
4167/* Opcode VEX.0F 0xad - invalid */
4168
4169
4170/* Opcode VEX.0F 0xae mem/0 - invalid. */
4171/* Opcode VEX.0F 0xae mem/1 - invalid. */
4172
4173/**
4174 * @ opmaps grp15
4175 * @ opcode !11/2
4176 * @ oppfx none
4177 * @ opcpuid sse
4178 * @ opgroup og_sse_mxcsrsm
4179 * @ opxcpttype 5
4180 * @ optest op1=0 -> mxcsr=0
4181 * @ optest op1=0x2083 -> mxcsr=0x2083
4182 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4183 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4184 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4185 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4186 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4187 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4188 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4189 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4190 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4191 */
4192FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4193//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4194//{
4195// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4196// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
4197// return IEMOP_RAISE_INVALID_OPCODE();
4198//
4199// IEM_MC_BEGIN(2, 0);
4200// IEM_MC_ARG(uint8_t, iEffSeg, 0);
4201// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4202// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4203// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4204// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4205// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4206// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4207// IEM_MC_END();
4208// return VINF_SUCCESS;
4209//}
4210
4211
4212/**
4213 * @opmaps vexgrp15
4214 * @opcode !11/3
4215 * @oppfx none
4216 * @opcpuid avx
4217 * @opgroup og_avx_mxcsrsm
4218 * @opxcpttype 5
4219 * @optest mxcsr=0 -> op1=0
4220 * @optest mxcsr=0x2083 -> op1=0x2083
4221 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4222 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4223 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4224 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4225 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4226 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4227 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4228 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4229 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4230 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4231 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4232 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4233 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4234 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4235 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4236 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4237 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4238 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4239 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4240 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4241 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4242 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4243 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4244 * -> value.xcpt=0x6
4245 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4246 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4247 * APMv4 rev 3.17 page 509.
4248 * @todo Test this instruction on AMD Ryzen.
4249 */
4250FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4251{
4252 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4253 IEM_MC_BEGIN(2, 0);
4254 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4255 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4256 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4257 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4258 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4259 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4260 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4261 IEM_MC_END();
4262 return VINF_SUCCESS;
4263}
4264
4265/* Opcode VEX.0F 0xae mem/4 - invalid. */
4266/* Opcode VEX.0F 0xae mem/5 - invalid. */
4267/* Opcode VEX.0F 0xae mem/6 - invalid. */
4268/* Opcode VEX.0F 0xae mem/7 - invalid. */
4269
4270/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4271/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4272/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4273/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4274/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4275/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4276/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4277/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4278
4279/**
4280 * VEX Group 15 jump table for memory variant.
4281 */
4282IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4283{ /* pfx: none, 066h, 0f3h, 0f2h */
4284 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4285 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4286 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4287 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4288 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4289 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4290 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4291 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4292};
4293AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4294
4295
4296/** Opcode VEX.0F 0xae. */
4297FNIEMOP_DEF(iemOp_VGrp15)
4298{
4299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4300 if (IEM_IS_MODRM_REG_MODE(bRm))
4301 /* register, register */
4302 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4303
4304 /* memory, register */
4305 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4306 + pVCpu->iem.s.idxPrefix], bRm);
4307}
4308
4309
4310/* Opcode VEX.0F 0xaf - invalid. */
4311
4312/* Opcode VEX.0F 0xb0 - invalid. */
4313/* Opcode VEX.0F 0xb1 - invalid. */
4314/* Opcode VEX.0F 0xb2 - invalid. */
4316/* Opcode VEX.0F 0xb3 - invalid. */
4317/* Opcode VEX.0F 0xb4 - invalid. */
4318/* Opcode VEX.0F 0xb5 - invalid. */
4319/* Opcode VEX.0F 0xb6 - invalid. */
4320/* Opcode VEX.0F 0xb7 - invalid. */
4321/* Opcode VEX.0F 0xb8 - invalid. */
4322/* Opcode VEX.0F 0xb9 - invalid. */
4323/* Opcode VEX.0F 0xba - invalid. */
4324/* Opcode VEX.0F 0xbb - invalid. */
4325/* Opcode VEX.0F 0xbc - invalid. */
4326/* Opcode VEX.0F 0xbd - invalid. */
4327/* Opcode VEX.0F 0xbe - invalid. */
4328/* Opcode VEX.0F 0xbf - invalid. */
4329
4330/* Opcode VEX.0F 0xc0 - invalid. */
4331/* Opcode VEX.66.0F 0xc0 - invalid. */
4332/* Opcode VEX.F3.0F 0xc0 - invalid. */
4333/* Opcode VEX.F2.0F 0xc0 - invalid. */
4334
4335/* Opcode VEX.0F 0xc1 - invalid. */
4336/* Opcode VEX.66.0F 0xc1 - invalid. */
4337/* Opcode VEX.F3.0F 0xc1 - invalid. */
4338/* Opcode VEX.F2.0F 0xc1 - invalid. */
4339
4340/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4341FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4342/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4343FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4344/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4345FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4346/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4347FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4348
4349/* Opcode VEX.0F 0xc3 - invalid */
4350/* Opcode VEX.66.0F 0xc3 - invalid */
4351/* Opcode VEX.F3.0F 0xc3 - invalid */
4352/* Opcode VEX.F2.0F 0xc3 - invalid */
4353
4354/* Opcode VEX.0F 0xc4 - invalid */
4355
4356
4357/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4358FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4359{
4360 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4362 if (IEM_IS_MODRM_REG_MODE(bRm))
4363 {
4364 /*
4365 * Register, register.
4366 */
4367 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4368 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4369 IEM_MC_BEGIN(4, 0);
4370 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4371 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4372 IEM_MC_ARG(uint16_t, u16Src, 2);
4373 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3);
4374 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4375 IEM_MC_PREPARE_AVX_USAGE();
4376 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4377 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4378 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4379 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4380 puDst, puSrc, u16Src, bEvilArg);
4381 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4382 IEM_MC_ADVANCE_RIP();
4383 IEM_MC_END();
4384 }
4385 else
4386 {
4387 /*
4388 * Register, memory.
4389 */
4390 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4391 IEM_MC_BEGIN(4, 1);
4392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4393 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4394 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4395 IEM_MC_ARG(uint16_t, u16Src, 2);
4396 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3);
4397
4398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4399 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4400 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4401 IEM_MC_PREPARE_AVX_USAGE();
4402
4403 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4404 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4405 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4406 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4407 puDst, puSrc, u16Src, bEvilArg);
4408 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4409
4410 IEM_MC_ADVANCE_RIP();
4411 IEM_MC_END();
4412 }
4414 return VINF_SUCCESS;
4415}
4416
4417
4418/* Opcode VEX.F3.0F 0xc4 - invalid */
4419/* Opcode VEX.F2.0F 0xc4 - invalid */
4420
4421/* Opcode VEX.0F 0xc5 - invalid */
4422
4423
4424/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4425FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4426{
4427 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4429 if (IEM_IS_MODRM_REG_MODE(bRm))
4430 {
4431 /*
4432 * Register, register.
4433 */
4434 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4435 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4436 IEM_MC_BEGIN(3, 1);
4437 IEM_MC_LOCAL(uint16_t, u16Dst);
4438 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
4439 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4440 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4441 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4442 IEM_MC_PREPARE_AVX_USAGE();
4443 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4444 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
4445 pu16Dst, puSrc, bEvilArg);
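 /* The 16-bit result is stored with a 32-bit GREG write, which zero extends it to the full 64-bit register width. */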
4446 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
4447 IEM_MC_ADVANCE_RIP();
4448 IEM_MC_END();
4449 return VINF_SUCCESS;
4450 }
4451
4452 /* No memory operand. */
4453 return IEMOP_RAISE_INVALID_OPCODE();
4454}
4455
4456
4457/* Opcode VEX.F3.0F 0xc5 - invalid */
4458/* Opcode VEX.F2.0F 0xc5 - invalid */
4459
4460
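/**
 * Common body for vshufps and vshufpd.
 *
 * Decodes ModR/M and the trailing shuffle control immediate, then calls the
 * iemAImpl_ ## a_Instr ## _u128/_u256 workers; the 128-bit path checks the
 * fAvx CPUID feature and the 256-bit path fAvx2, like the workers above.
 */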
4461#define VSHUFP_X(a_Instr) \
4462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4463 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4464 { \
4465 /* \
4466 * Register, register. \
4467 */ \
4468 if (pVCpu->iem.s.uVexLength) \
4469 { \
4470 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4471 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4472 IEM_MC_BEGIN(4, 3); \
4473 IEM_MC_LOCAL(RTUINT256U, uDst); \
4474 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4475 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4476 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4477 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4478 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4479 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4480 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4481 IEM_MC_PREPARE_AVX_USAGE(); \
4482 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4483 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4484 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4485 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4486 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4487 IEM_MC_ADVANCE_RIP(); \
4488 IEM_MC_END(); \
4489 } \
4490 else \
4491 { \
4492 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4493 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4494 IEM_MC_BEGIN(4, 0); \
4495 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4496 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4497 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4498 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4499 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4500 IEM_MC_PREPARE_AVX_USAGE(); \
4501 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4502 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4503 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4504 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4505 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4506 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4507 IEM_MC_ADVANCE_RIP(); \
4508 IEM_MC_END(); \
4509 } \
4510 } \
4511 else \
4512 { \
4513 /* \
4514 * Register, memory. \
4515 */ \
4516 if (pVCpu->iem.s.uVexLength) \
4517 { \
4518 IEM_MC_BEGIN(4, 4); \
4519 IEM_MC_LOCAL(RTUINT256U, uDst); \
4520 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4521 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4523 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4524 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4525 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4527 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4528 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4529 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4530 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4531 IEM_MC_PREPARE_AVX_USAGE(); \
4532 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4533 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4534 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4535 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4536 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4537 IEM_MC_ADVANCE_RIP(); \
4538 IEM_MC_END(); \
4539 } \
4540 else \
4541 { \
4542 IEM_MC_BEGIN(4, 2); \
4543 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4544 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4545 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4546 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4547 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4549 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4550 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4551 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4552 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4553 IEM_MC_PREPARE_AVX_USAGE(); \
4554 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4555 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4556 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4557 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4558 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4559 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4560 IEM_MC_ADVANCE_RIP(); \
4561 IEM_MC_END(); \
4562 } \
4563 } \
4564 return VINF_SUCCESS;
4565
4566/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4567FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4568{
4569 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4570 VSHUFP_X(vshufps);
4571}
4572
4573
4574/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4575FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4576{
4577 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4578 VSHUFP_X(vshufpd);
4579}
4580#undef VSHUFP_X
4581
4582
4583/* Opcode VEX.F3.0F 0xc6 - invalid */
4584/* Opcode VEX.F2.0F 0xc6 - invalid */
4585
4586/* Opcode VEX.0F 0xc7 - invalid */
4587/* Opcode VEX.66.0F 0xc7 - invalid */
4588/* Opcode VEX.F3.0F 0xc7 - invalid */
4589/* Opcode VEX.F2.0F 0xc7 - invalid */
4590
4591/* Opcode VEX.0F 0xc8 - invalid */
4592/* Opcode VEX.0F 0xc9 - invalid */
4593/* Opcode VEX.0F 0xca - invalid */
4594/* Opcode VEX.0F 0xcb - invalid */
4595/* Opcode VEX.0F 0xcc - invalid */
4596/* Opcode VEX.0F 0xcd - invalid */
4597/* Opcode VEX.0F 0xce - invalid */
4598/* Opcode VEX.0F 0xcf - invalid */
4599
4600
4601/* Opcode VEX.0F 0xd0 - invalid */
4602/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4603FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4604/* Opcode VEX.F3.0F 0xd0 - invalid */
4605/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4606FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4607
4608/* Opcode VEX.0F 0xd1 - invalid */
4609/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
4610FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
4611/* Opcode VEX.F3.0F 0xd1 - invalid */
4612/* Opcode VEX.F2.0F 0xd1 - invalid */
4613
4614/* Opcode VEX.0F 0xd2 - invalid */
4615/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4616FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
4617/* Opcode VEX.F3.0F 0xd2 - invalid */
4618/* Opcode VEX.F2.0F 0xd2 - invalid */
4619
4620/* Opcode VEX.0F 0xd3 - invalid */
4621/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4622FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
4623/* Opcode VEX.F3.0F 0xd3 - invalid */
4624/* Opcode VEX.F2.0F 0xd3 - invalid */
4625
4626/* Opcode VEX.0F 0xd4 - invalid */
4627
4628
4629/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4630FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4631{
4632 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4633 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4634 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4635}
4636
4637
4638/* Opcode VEX.F3.0F 0xd4 - invalid */
4639/* Opcode VEX.F2.0F 0xd4 - invalid */
4640
4641/* Opcode VEX.0F 0xd5 - invalid */
4642
4643
4644/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4645FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4646{
4647 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4648 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4649 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4650}
4651
4652
4653/* Opcode VEX.F3.0F 0xd5 - invalid */
4654/* Opcode VEX.F2.0F 0xd5 - invalid */
4655
4656/* Opcode VEX.0F 0xd6 - invalid */
4657
4658/**
4659 * @opcode 0xd6
4660 * @oppfx 0x66
4661 * @opcpuid avx
4662 * @opgroup og_avx_pcksclr_datamove
4663 * @opxcpttype none
4664 * @optest op1=-1 op2=2 -> op1=2
4665 * @optest op1=0 op2=-42 -> op1=-42
4666 */
4667FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4668{
4669 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4671 if (IEM_IS_MODRM_REG_MODE(bRm))
4672 {
4673 /*
4674 * Register, register.
4675 */
4676 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4677 IEM_MC_BEGIN(0, 0);
4678
4679 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4680 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4681
4682 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4683 IEM_GET_MODRM_REG(pVCpu, bRm));
4684 IEM_MC_ADVANCE_RIP();
4685 IEM_MC_END();
4686 }
4687 else
4688 {
4689 /*
4690 * Memory, register.
4691 */
4692 IEM_MC_BEGIN(0, 2);
4693 IEM_MC_LOCAL(uint64_t, uSrc);
4694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4695
4696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4697 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4698 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4699 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4700
4701 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4702 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4703
4704 IEM_MC_ADVANCE_RIP();
4705 IEM_MC_END();
4706 }
4707 return VINF_SUCCESS;
4708}
4709
4710/* Opcode VEX.F3.0F 0xd6 - invalid */
4711/* Opcode VEX.F2.0F 0xd6 - invalid */
4712
4713
4714/* Opcode VEX.0F 0xd7 - invalid */
4715
4716/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4717FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4718{
4719 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4720 /* Docs say register only. */
4721 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4722 {
4723 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4724 IEMOP_MNEMONIC2(RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
4725 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4726 if (pVCpu->iem.s.uVexLength)
4727 {
4728 IEM_MC_BEGIN(2, 1);
4729 IEM_MC_ARG(uint64_t *, puDst, 0);
4730 IEM_MC_LOCAL(RTUINT256U, uSrc);
4731 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4732 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4733 IEM_MC_PREPARE_AVX_USAGE();
4734 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4735 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4736 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4737 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4738 IEM_MC_ADVANCE_RIP();
4739 IEM_MC_END();
4740 }
4741 else
4742 {
4743 IEM_MC_BEGIN(2, 0);
4744 IEM_MC_ARG(uint64_t *, puDst, 0);
4745 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4746 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4747 IEM_MC_PREPARE_AVX_USAGE();
4748 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4749 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4750 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4751 IEM_MC_ADVANCE_RIP();
4752 IEM_MC_END();
4753 }
4754 return VINF_SUCCESS;
4755 }
4756 return IEMOP_RAISE_INVALID_OPCODE();
4757}
4758
4759
4760/* Opcode VEX.F3.0F 0xd7 - invalid */
4761/* Opcode VEX.F2.0F 0xd7 - invalid */
4762
4763
4764/* Opcode VEX.0F 0xd8 - invalid */
4765/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
4766FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
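
///* Sketch only: assuming iemAImpl_vpsubusb_u128/_u256 workers exist (names
// * unverified), this stub would follow the same pattern as vpsubb below:
// *     IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
// *     IEMOPMEDIAF3_INIT_VARS( vpsubusb);
// *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
// */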
4767/* Opcode VEX.F3.0F 0xd8 - invalid */
4768/* Opcode VEX.F2.0F 0xd8 - invalid */
4769
4770/* Opcode VEX.0F 0xd9 - invalid */
4771/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
4772FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
4773/* Opcode VEX.F3.0F 0xd9 - invalid */
4774/* Opcode VEX.F2.0F 0xd9 - invalid */
4775
4776/* Opcode VEX.0F 0xda - invalid */
4777
4778
4779/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
4780FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
4781{
4782 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4783 IEMOPMEDIAF3_INIT_VARS(vpminub);
4784 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4785}
4786
4787
4788/* Opcode VEX.F3.0F 0xda - invalid */
4789/* Opcode VEX.F2.0F 0xda - invalid */
4790
4791/* Opcode VEX.0F 0xdb - invalid */
4792
4793
4794/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
4795FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
4796{
4797 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4798 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4799 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
4800}
4801
4802
4803/* Opcode VEX.F3.0F 0xdb - invalid */
4804/* Opcode VEX.F2.0F 0xdb - invalid */
4805
4806/* Opcode VEX.0F 0xdc - invalid */
4807/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
4808FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
4809/* Opcode VEX.F3.0F 0xdc - invalid */
4810/* Opcode VEX.F2.0F 0xdc - invalid */
4811
4812/* Opcode VEX.0F 0xdd - invalid */
4813/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
4814FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
4815/* Opcode VEX.F3.0F 0xdd - invalid */
4816/* Opcode VEX.F2.0F 0xdd - invalid */
4817
4818/* Opcode VEX.0F 0xde - invalid */
4819
4820
4821/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
4822FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
4823{
4824 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4825 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
4826 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4827}
4828
4829
4830/* Opcode VEX.F3.0F 0xde - invalid */
4831/* Opcode VEX.F2.0F 0xde - invalid */
4832
4833/* Opcode VEX.0F 0xdf - invalid */
4834
4835
4836/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
4837FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
4838{
4839 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4840 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4841 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
4842}
4843
4844
4845/* Opcode VEX.F3.0F 0xdf - invalid */
4846/* Opcode VEX.F2.0F 0xdf - invalid */
4847
4848/* Opcode VEX.0F 0xe0 - invalid */
4849
4850
4851/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
4852FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
4853{
4854 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4855 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
4856 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4857}
4858
4859
4860/* Opcode VEX.F3.0F 0xe0 - invalid */
4861/* Opcode VEX.F2.0F 0xe0 - invalid */
4862
4863/* Opcode VEX.0F 0xe1 - invalid */
4864/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
4865FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
4866/* Opcode VEX.F3.0F 0xe1 - invalid */
4867/* Opcode VEX.F2.0F 0xe1 - invalid */
4868
4869/* Opcode VEX.0F 0xe2 - invalid */
4870/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
4871FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
4872/* Opcode VEX.F3.0F 0xe2 - invalid */
4873/* Opcode VEX.F2.0F 0xe2 - invalid */
4874
4875/* Opcode VEX.0F 0xe3 - invalid */
4876
4877
4878/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
4879FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
4880{
4881 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4882 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
4883 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4884}
4885
4886
4887/* Opcode VEX.F3.0F 0xe3 - invalid */
4888/* Opcode VEX.F2.0F 0xe3 - invalid */
4889
4890/* Opcode VEX.0F 0xe4 - invalid */
4891
4892
4893/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
4894FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
4895{
4896 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4897 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
4898 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4899}
4900
4901
4902/* Opcode VEX.F3.0F 0xe4 - invalid */
4903/* Opcode VEX.F2.0F 0xe4 - invalid */
4904
4905/* Opcode VEX.0F 0xe5 - invalid */
4906
4907
4908/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
4909FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
4910{
4911 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4912 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
4913 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4914}
4915
4916
4917/* Opcode VEX.F3.0F 0xe5 - invalid */
4918/* Opcode VEX.F2.0F 0xe5 - invalid */
4919
4920/* Opcode VEX.0F 0xe6 - invalid */
4921/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
4922FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
4923/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
4924FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
4925/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
4926FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
4927
4928
4929/* Opcode VEX.0F 0xe7 - invalid */
4930
4931/**
4932 * @opcode 0xe7
4933 * @opcodesub !11 mr/reg
4934 * @oppfx 0x66
4935 * @opcpuid avx
4936 * @opgroup og_avx_cachect
4937 * @opxcpttype 1
4938 * @optest op1=-1 op2=2 -> op1=2
4939 * @optest op1=0 op2=-42 -> op1=-42
4940 */
4941FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
4942{
4943 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4944 Assert(pVCpu->iem.s.uVexLength <= 1);
4945 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
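 /* Note: the non-temporal hint is not modelled; both paths below perform plain aligned stores. */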
4946 if (IEM_IS_MODRM_MEM_MODE(bRm))
4947 {
4948 if (pVCpu->iem.s.uVexLength == 0)
4949 {
4950 /*
4951 * 128-bit: Memory, register.
4952 */
4953 IEM_MC_BEGIN(0, 2);
4954 IEM_MC_LOCAL(RTUINT128U, uSrc);
4955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4956
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4958 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4959 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4960 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4961
4962 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4963 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4964
4965 IEM_MC_ADVANCE_RIP();
4966 IEM_MC_END();
4967 }
4968 else
4969 {
4970 /*
4971 * 256-bit: Memory, register.
4972 */
4973 IEM_MC_BEGIN(0, 2);
4974 IEM_MC_LOCAL(RTUINT256U, uSrc);
4975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4976
4977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4978 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4979 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4980 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4981
4982 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4983 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4984
4985 IEM_MC_ADVANCE_RIP();
4986 IEM_MC_END();
4987 }
4988 return VINF_SUCCESS;
4989 }
4990 /**
4991 * @opdone
4992 * @opmnemonic udvex660fe7reg
4993 * @opcode 0xe7
4994 * @opcodesub 11 mr/reg
4995 * @oppfx 0x66
4996 * @opunused immediate
4997 * @opcpuid avx
4998 * @optest ->
4999 */
5000 return IEMOP_RAISE_INVALID_OPCODE();
5001}
5002
5003/* Opcode VEX.F3.0F 0xe7 - invalid */
5004/* Opcode VEX.F2.0F 0xe7 - invalid */
5005
5006
5007/* Opcode VEX.0F 0xe8 - invalid */
5008/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
5009FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
5010/* Opcode VEX.F3.0F 0xe8 - invalid */
5011/* Opcode VEX.F2.0F 0xe8 - invalid */
5012
5013/* Opcode VEX.0F 0xe9 - invalid */
5014/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5015FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
5016/* Opcode VEX.F3.0F 0xe9 - invalid */
5017/* Opcode VEX.F2.0F 0xe9 - invalid */
5018
5019/* Opcode VEX.0F 0xea - invalid */
5020
5021
5022/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5023FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5024{
5025 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5026 IEMOPMEDIAF3_INIT_VARS(vpminsw);
5027 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5028}
5029
5030
5031/* Opcode VEX.F3.0F 0xea - invalid */
5032/* Opcode VEX.F2.0F 0xea - invalid */
5033
5034/* Opcode VEX.0F 0xeb - invalid */
5035
5036
5037/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5038FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5039{
5040 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5041 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5042 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5043}
5044
5045
5047/* Opcode VEX.F3.0F 0xeb - invalid */
5048/* Opcode VEX.F2.0F 0xeb - invalid */
5049
5050/* Opcode VEX.0F 0xec - invalid */
5051/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5052FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
5053/* Opcode VEX.F3.0F 0xec - invalid */
5054/* Opcode VEX.F2.0F 0xec - invalid */
5055
5056/* Opcode VEX.0F 0xed - invalid */
5057/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5058FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
5059/* Opcode VEX.F3.0F 0xed - invalid */
5060/* Opcode VEX.F2.0F 0xed - invalid */
5061
5062/* Opcode VEX.0F 0xee - invalid */
5063
5064
5065/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5066FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5067{
5068 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5069 IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
5070 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5071}
5072
5073
5074/* Opcode VEX.F3.0F 0xee - invalid */
5075/* Opcode VEX.F2.0F 0xee - invalid */
5076
5077
5078/* Opcode VEX.0F 0xef - invalid */
5079
5080
5081/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5082FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5083{
5084 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5085 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5086 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5087}
5088
5089
5090/* Opcode VEX.F3.0F 0xef - invalid */
5091/* Opcode VEX.F2.0F 0xef - invalid */
5092
5093/* Opcode VEX.0F 0xf0 - invalid */
5094/* Opcode VEX.66.0F 0xf0 - invalid */
5095
5096
5097/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5098FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5099{
5100 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5101 Assert(pVCpu->iem.s.uVexLength <= 1);
5102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5103 if (IEM_IS_MODRM_REG_MODE(bRm))
5104 {
5105 /*
5106 * Register, register - (not implemented, assuming it raises \#UD).
5107 */
5108 return IEMOP_RAISE_INVALID_OPCODE();
5109 }
5110 else if (pVCpu->iem.s.uVexLength == 0)
5111 {
5112 /*
5113 * Register, memory128.
5114 */
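 /* vlddqu imposes no alignment requirement, hence the plain (unaligned) memory fetch. */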
5115 IEM_MC_BEGIN(0, 2);
5116 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5118
5119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5120 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
5121 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5122 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5123
5124 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5125 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5126
5127 IEM_MC_ADVANCE_RIP();
5128 IEM_MC_END();
5129 }
5130 else
5131 {
5132 /*
5133 * Register, memory256.
5134 */
5135 IEM_MC_BEGIN(0, 2);
5136 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5138
5139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5140 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
5141 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5142 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5143
5144 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5145 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5146
5147 IEM_MC_ADVANCE_RIP();
5148 IEM_MC_END();
5149 }
5150 return VINF_SUCCESS;
5151}
5152
5153
5154/* Opcode VEX.0F 0xf1 - invalid */
5155/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
5156FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
5157/* Opcode VEX.F2.0F 0xf1 - invalid */
5158
5159/* Opcode VEX.0F 0xf2 - invalid */
5160/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5161FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
5162/* Opcode VEX.F2.0F 0xf2 - invalid */
5163
5164/* Opcode VEX.0F 0xf3 - invalid */
5165/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5166FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
5167/* Opcode VEX.F2.0F 0xf3 - invalid */
5168
5169/* Opcode VEX.0F 0xf4 - invalid */
5170
5171
5172/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
5173FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5174{
5175 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5176 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5177 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5178}
5179
5180
5181/* Opcode VEX.F2.0F 0xf4 - invalid */
5182
5183/* Opcode VEX.0F 0xf5 - invalid */
5184/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5185FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
5186/* Opcode VEX.F2.0F 0xf5 - invalid */
5187
5188/* Opcode VEX.0F 0xf6 - invalid */
5189
5190
5191/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5192FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5193{
5194 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5195 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5196 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5197}
5198
5199
5200/* Opcode VEX.F2.0F 0xf6 - invalid */
5201
5202/* Opcode VEX.0F 0xf7 - invalid */
5203/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5204FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
5205/* Opcode VEX.F2.0F 0xf7 - invalid */
5206
5207/* Opcode VEX.0F 0xf8 - invalid */
5208
5209
5210/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5211FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5212{
5213 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5214 IEMOPMEDIAF3_INIT_VARS( vpsubb);
5215 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5216}
5217
5218
5219/* Opcode VEX.F2.0F 0xf8 - invalid */
5220
5221/* Opcode VEX.0F 0xf9 - invalid */
5222
5223
5224/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
5225FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
5226{
5227 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5228 IEMOPMEDIAF3_INIT_VARS( vpsubw);
5229 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5230}
5231
5232
5233/* Opcode VEX.F2.0F 0xf9 - invalid */
5234
5235/* Opcode VEX.0F 0xfa - invalid */
5236
5237
5238/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
5239FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
5240{
5241 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5242 IEMOPMEDIAF3_INIT_VARS( vpsubd);
5243 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5244}
5245
5246
5247/* Opcode VEX.F2.0F 0xfa - invalid */
5248
5249/* Opcode VEX.0F 0xfb - invalid */
5250
5251
5252/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, W */
5253FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
5254{
5255 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5256 IEMOPMEDIAF3_INIT_VARS( vpsubq);
5257 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5258}
5259
5260
5261/* Opcode VEX.F2.0F 0xfb - invalid */
5262
5263/* Opcode VEX.0F 0xfc - invalid */
5264
5265
5266/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
5267FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
5268{
5269 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5270 IEMOPMEDIAF3_INIT_VARS( vpaddb);
5271 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5272}
5273
5274
5275/* Opcode VEX.F2.0F 0xfc - invalid */
5276
5277/* Opcode VEX.0F 0xfd - invalid */
5278
5279
5280/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
5281FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
5282{
5283 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5284 IEMOPMEDIAF3_INIT_VARS( vpaddw);
5285 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5286}
5287
5288
5289/* Opcode VEX.F2.0F 0xfd - invalid */
5290
5291/* Opcode VEX.0F 0xfe - invalid */
5292
5293
5294/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, W */
5295FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
5296{
5297 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5298 IEMOPMEDIAF3_INIT_VARS( vpaddd);
5299 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5300}
5301
5302
5303/* Opcode VEX.F2.0F 0xfe - invalid */
5304
5305
5306/** Opcode **** 0x0f 0xff - UD0 */
5307FNIEMOP_DEF(iemOp_vud0)
5308{
5309 IEMOP_MNEMONIC(vud0, "vud0");
5310 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
5311 {
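        /* Intel CPUs consume a ModR/M byte (and decode any memory operand) for
           UD0 before raising \#UD, while AMD CPUs raise \#UD on the opcode
           alone - hence the vendor check above. */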
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}




/**
 * VEX opcode map \#1.
 *
 * @sa g_apfnTwoByteMap
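 *
 * @remarks Four entries per opcode, one for each SIMD prefix (none, 066h,
 *          0f3h, 0f2h); the decoder is assumed to index the table as
 *          bOpcode * 4 + prefix index, mirroring g_apfnTwoByteMap.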
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
{
    /*         no prefix,                   066h prefix,                f3h prefix,               f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
/** @} */
