VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h @ 99328

Last change on this file since 99328 was 99324, checked in by vboxsync, 23 months ago:

VMM/IEM: Use IEMOP_HLP_DONE_VEX_DECODING_*() rather than IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT or IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT to check for the AVX and AVX2 CPUID bits, since the latter two are for runtime checks while the former is for the decoding stage. OTOH, the AVX CPUID check is unnecessary in the VexMap files, since the VEX prefix decoding already checks for it - but that can be optimized some other time. Fixed a number of AVX2/AVX mixups resulting from copy&paste or laziness. bugref:10369
/* $Id: IEMAllInstructionsVexMap1.cpp.h 99324 2023-04-06 23:34:00Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(4, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(4, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
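
/*
 * Dispatch sketch for the worker above (hypothetical vpxxx mnemonic, and
 * assuming an IEMOPMEDIAF3_INIT_VARS helper analogous to the
 * IEMOPMEDIAOPTF3_INIT_VARS actually used by the vunpck* handlers below):
 * an opcode handler merely selects the host/fallback function table and
 * forwards to the common worker, e.g.:
 *
 *     FNIEMOP_DEF(iemOp_vpxxx_Vx_Hx_Wx)
 *     {
 *         IEMOPMEDIAF3_INIT_VARS(vpxxx);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */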


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx    xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx    ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx    xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx    ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}
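
/*
 * Both wrappers above currently forward unmodified to the generic _Opt worker;
 * they exist as distinct entry points in case the partial-fetch optimization
 * mentioned in their doc comments is ever implemented.  See
 * iemOp_vunpcklps_Vx_Hx_Wx further down for a real caller:
 *
 *     IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 */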


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1/mem128
 *     - vpxxx    ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(2, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
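
/*
 * Dispatch sketch for the two-operand worker above.  None of the opcodes in
 * this excerpt use it directly; a caller would mirror the vunpck* pattern
 * below, assuming an IEMOPMEDIAOPTF2_INIT_VARS helper analogous to the
 * IEMOPMEDIAOPTF3_INIT_VARS actually used in this file (vpxxx is hypothetical):
 *
 *     IEMOPMEDIAOPTF2_INIT_VARS(vpxxx);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 */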


/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
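
/*
 * Encoding illustration for the handler above (hand-assembled, so worth
 * cross-checking with a disassembler before relying on the exact bytes):
 *     C5 F8 10 C1    vmovups xmm0, xmm1   ; 2-byte VEX, L=0, pp=00
 *     C5 FC 10 C1    vmovups ymm0, ymm1   ; VEX.L=1 selects the 256-bit form
 * For L=0 the register copy zeroes bits 255:128 of the destination, per
 * IEM_MC_COPY_YREG_U128_ZX_VLMAX.
 */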


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
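
/*
 * Semantics recap for the two vmovss forms above (cf. the @optest values):
 * the register form merges, the memory form zero-extends:
 *     vmovss xmm0, xmm1, xmm2   ; xmm0[31:0] = xmm2[31:0],
 *                               ; xmm0[127:32] = xmm1[127:32], upper ymm0 bits = 0
 *     vmovss xmm0, [mem32]      ; xmm0[31:0] = mem32, everything above zeroed
 */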


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
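
/*
 * Same pattern as vmovss above, only at qword granularity:
 *     vmovsd xmm0, xmm1, xmm2   ; xmm0[63:0] = xmm2[63:0],
 *                               ; xmm0[127:64] = xmm1[127:64], upper ymm0 bits = 0
 *     vmovsd xmm0, [mem64]      ; xmm0[63:0] = mem64, everything above zeroed
 */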


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Wss_Hss_Vss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         * @optest op2=0x2200220122022203
         *         op3=0x3304330533063307
         *         -> op1=0x22002201220222033304330533063307
         * @optest op2=-1 op3=-42 -> op1=-42
         * @note op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest op1=1 op2=0 op3=0 -> op1=0
         * @optest op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
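
/*
 * Worked example for the register form (vmovhlps), matching the first @optest
 * above: with xmm1[127:64] = 0x2200220122022203 and
 * xmm2[127:64] = 0x3304330533063307,
 *     vmovhlps xmm0, xmm1, xmm2   ; xmm0[63:0]   = xmm2[127:64]
 *                                 ; xmm0[127:64] = xmm1[127:64]
 * yields xmm0 = 0x22002201220222033304330533063307, upper ymm0 bits cleared.
 */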


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 * @optest op2=0 op3=2 -> op1=2
 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *         -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *         -> op1=0x00000002000000020000000100000001
 * @optest vex.l==1 /
 *         op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *         -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
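
/*
 * Element pattern implemented by the stores above: vmovsldup broadcasts the
 * even dwords of the source into each dword pair, per 128-bit lane:
 *     dst[31:0]  = dst[63:32]  = src[31:0]
 *     dst[95:64] = dst[127:96] = src[95:64]
 */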


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *         -> op1=0x22222222111111112222222211111111
 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *         -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t, uSrc, 0);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(1, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(uint64_t, uSrc, 0);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
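
/*
 * vmovddup duplicates at qword granularity instead (see the U64 stores above):
 *     dst[63:0] = dst[127:64] = src[63:0]
 * with the VEX.256 form applying the same pattern per 128-bit lane via
 * iemAImpl_vmovddup_256_rr/_rm.
 */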


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

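/*
 * Interleave pattern for the vunpcklps handler above, per 128-bit lane
 * (dwords listed from least to most significant); vunpcklpd is the analogous
 * qword interleave:
 *     dst = { src1[31:0], src2[31:0], src1[63:32], src2[63:32] }
 */
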
/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */


/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x15 - invalid */
/* Opcode VEX.F2.0F 0x15 - invalid */


FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 */
FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 */
FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
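
/*
 * vmovshdup mirrors vmovsldup but broadcasts the odd dwords (note the source
 * indices 1 and 3 in the stores above):
 *     dst[31:0]  = dst[63:32]  = src[63:32]
 *     dst[95:64] = dst[127:96] = src[127:96]
 */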


/* Opcode VEX.F2.0F 0x16 - invalid */


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}


/* Opcode VEX.F3.0F 0x17 - invalid */
/* Opcode VEX.F2.0F 0x17 - invalid */


/* Opcode VEX.0F 0x18 - invalid */
/* Opcode VEX.0F 0x19 - invalid */
/* Opcode VEX.0F 0x1a - invalid */
/* Opcode VEX.0F 0x1b - invalid */
/* Opcode VEX.0F 0x1c - invalid */
/* Opcode VEX.0F 0x1d - invalid */
/* Opcode VEX.0F 0x1e - invalid */
/* Opcode VEX.0F 0x1f - invalid */

/* Opcode VEX.0F 0x20 - invalid */
/* Opcode VEX.0F 0x21 - invalid */
/* Opcode VEX.0F 0x22 - invalid */
/* Opcode VEX.0F 0x23 - invalid */
/* Opcode VEX.0F 0x24 - invalid */
/* Opcode VEX.0F 0x25 - invalid */
/* Opcode VEX.0F 0x26 - invalid */
/* Opcode VEX.0F 0x27 - invalid */

1765/**
1766 * @opcode 0x28
1767 * @oppfx none
1768 * @opcpuid avx
1769 * @opgroup og_avx_pcksclr_datamove
1770 * @opxcpttype 1
1771 * @optest op1=1 op2=2 -> op1=2
1772 * @optest op1=0 op2=-42 -> op1=-42
1773 * @note Almost identical to vmovapd.
1774 */
1775FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1776{
1777 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1779 Assert(pVCpu->iem.s.uVexLength <= 1);
1780 if (IEM_IS_MODRM_REG_MODE(bRm))
1781 {
1782 /*
1783 * Register, register.
1784 */
1785 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1786 IEM_MC_BEGIN(1, 0);
1787
1788 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1789 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1790 if (pVCpu->iem.s.uVexLength == 0)
1791 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1792 IEM_GET_MODRM_RM(pVCpu, bRm));
1793 else
1794 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1795 IEM_GET_MODRM_RM(pVCpu, bRm));
1796 IEM_MC_ADVANCE_RIP_AND_FINISH();
1797 IEM_MC_END();
1798 }
1799 else
1800 {
1801 /*
1802 * Register, memory.
1803 */
1804 if (pVCpu->iem.s.uVexLength == 0)
1805 {
1806 IEM_MC_BEGIN(0, 2);
1807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1808 IEM_MC_LOCAL(RTUINT128U, uSrc);
1809
1810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1811 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1812 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1813 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1814
1815 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1816 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1817
1818 IEM_MC_ADVANCE_RIP_AND_FINISH();
1819 IEM_MC_END();
1820 }
1821 else
1822 {
1823 IEM_MC_BEGIN(0, 2);
1824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1825 IEM_MC_LOCAL(RTUINT256U, uSrc);
1826
1827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1828 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1829 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1830 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1831
1832 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1833 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1834
1835 IEM_MC_ADVANCE_RIP_AND_FINISH();
1836 IEM_MC_END();
1837 }
1838 }
1839}
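
/*
 * Illustrative sketch (guarded out of the build): the VLMAX zero-extension
 * performed by the 128-bit load path above. The four-qword destination
 * layout and the sketch* name are hypothetical, not VBox types.
 */
#if 0
# include <stdint.h>
# include <string.h>

/* VEX.128 vmovaps xmm, m128: the low 16 bytes are loaded and bits 255:128
   of the destination YMM register are cleared. */
static void sketchVMovApsLoad128(uint64_t pau64YmmDst[4], void const *pvSrc16)
{
    memcpy(&pau64YmmDst[0], pvSrc16, 16);
    pau64YmmDst[2] = 0;
    pau64YmmDst[3] = 0;
}
#endif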
1840
1841
1842/**
1843 * @opcode 0x28
1844 * @oppfx 66
1845 * @opcpuid avx
1846 * @opgroup og_avx_pcksclr_datamove
1847 * @opxcpttype 1
1848 * @optest op1=1 op2=2 -> op1=2
1849 * @optest op1=0 op2=-42 -> op1=-42
1850 * @note Almost identical to vmovaps
1851 */
1852FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1853{
1854 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1856 Assert(pVCpu->iem.s.uVexLength <= 1);
1857 if (IEM_IS_MODRM_REG_MODE(bRm))
1858 {
1859 /*
1860 * Register, register.
1861 */
1862 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1863 IEM_MC_BEGIN(1, 0);
1864
1865 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1866 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1867 if (pVCpu->iem.s.uVexLength == 0)
1868 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1869 IEM_GET_MODRM_RM(pVCpu, bRm));
1870 else
1871 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1872 IEM_GET_MODRM_RM(pVCpu, bRm));
1873 IEM_MC_ADVANCE_RIP_AND_FINISH();
1874 IEM_MC_END();
1875 }
1876 else
1877 {
1878 /*
1879 * Register, memory.
1880 */
1881 if (pVCpu->iem.s.uVexLength == 0)
1882 {
1883 IEM_MC_BEGIN(0, 2);
1884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1885 IEM_MC_LOCAL(RTUINT128U, uSrc);
1886
1887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1888 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1889 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1890 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1891
1892 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1893 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1894
1895 IEM_MC_ADVANCE_RIP_AND_FINISH();
1896 IEM_MC_END();
1897 }
1898 else
1899 {
1900 IEM_MC_BEGIN(0, 2);
1901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1902 IEM_MC_LOCAL(RTUINT256U, uSrc);
1903
1904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1905 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1906 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1907 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1908
1909 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1910 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1911
1912 IEM_MC_ADVANCE_RIP_AND_FINISH();
1913 IEM_MC_END();
1914 }
1915 }
1916}
1917
1918/**
1919 * @opmnemonic udvexf30f28
1920 * @opcode 0x28
1921 * @oppfx 0xf3
1922 * @opunused vex.modrm
1923 * @opcpuid avx
1924 * @optest ->
1925 * @opdone
1926 */
1927
1928/**
1929 * @opmnemonic udvexf20f28
1930 * @opcode 0x28
1931 * @oppfx 0xf2
1932 * @opunused vex.modrm
1933 * @opcpuid avx
1934 * @optest ->
1935 * @opdone
1936 */
1937
1938/**
1939 * @opcode 0x29
1940 * @oppfx none
1941 * @opcpuid avx
1942 * @opgroup og_avx_pcksclr_datamove
1943 * @opxcpttype 1
1944 * @optest op1=1 op2=2 -> op1=2
1945 * @optest op1=0 op2=-42 -> op1=-42
1946 * @note Almost identical to vmovapd.
1947 */
1948FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1949{
1950 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1952 Assert(pVCpu->iem.s.uVexLength <= 1);
1953 if (IEM_IS_MODRM_REG_MODE(bRm))
1954 {
1955 /*
1956 * Register, register.
1957 */
1958 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1959 IEM_MC_BEGIN(1, 0);
1960
1961 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1962 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1963 if (pVCpu->iem.s.uVexLength == 0)
1964 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1965 IEM_GET_MODRM_REG(pVCpu, bRm));
1966 else
1967 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1968 IEM_GET_MODRM_REG(pVCpu, bRm));
1969 IEM_MC_ADVANCE_RIP_AND_FINISH();
1970 IEM_MC_END();
1971 }
1972 else
1973 {
1974 /*
1975 * Register, memory.
1976 */
1977 if (pVCpu->iem.s.uVexLength == 0)
1978 {
1979 IEM_MC_BEGIN(0, 2);
1980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1981 IEM_MC_LOCAL(RTUINT128U, uSrc);
1982
1983 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1984 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1985 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1986 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1987
1988 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1989 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1990
1991 IEM_MC_ADVANCE_RIP_AND_FINISH();
1992 IEM_MC_END();
1993 }
1994 else
1995 {
1996 IEM_MC_BEGIN(0, 2);
1997 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1998 IEM_MC_LOCAL(RTUINT256U, uSrc);
1999
2000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2001 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2002 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2003 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2004
2005 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2006 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2007
2008 IEM_MC_ADVANCE_RIP_AND_FINISH();
2009 IEM_MC_END();
2010 }
2011 }
2012}
2013
2014/**
2015 * @opcode 0x29
2016 * @oppfx 66
2017 * @opcpuid avx
2018 * @opgroup og_avx_pcksclr_datamove
2019 * @opxcpttype 1
2020 * @optest op1=1 op2=2 -> op1=2
2021 * @optest op1=0 op2=-42 -> op1=-42
2022 * @note Almost identical to vmovaps
2023 */
2024FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2025{
2026 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2027 Assert(pVCpu->iem.s.uVexLength <= 1);
2028 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2029 if (IEM_IS_MODRM_REG_MODE(bRm))
2030 {
2031 /*
2032 * Register, register.
2033 */
2034 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2035 IEM_MC_BEGIN(1, 0);
2036
2037 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2038 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2039 if (pVCpu->iem.s.uVexLength == 0)
2040 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2041 IEM_GET_MODRM_REG(pVCpu, bRm));
2042 else
2043 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2044 IEM_GET_MODRM_REG(pVCpu, bRm));
2045 IEM_MC_ADVANCE_RIP_AND_FINISH();
2046 IEM_MC_END();
2047 }
2048 else
2049 {
2050 /*
2051 * Register, memory.
2052 */
2053 if (pVCpu->iem.s.uVexLength == 0)
2054 {
2055 IEM_MC_BEGIN(0, 2);
2056 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2057 IEM_MC_LOCAL(RTUINT128U, uSrc);
2058
2059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2060 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2061 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2062 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2063
2064 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2065 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2066
2067 IEM_MC_ADVANCE_RIP_AND_FINISH();
2068 IEM_MC_END();
2069 }
2070 else
2071 {
2072 IEM_MC_BEGIN(0, 2);
2073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2074 IEM_MC_LOCAL(RTUINT256U, uSrc);
2075
2076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2077 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2078 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2079 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2080
2081 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2082 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2083
2084 IEM_MC_ADVANCE_RIP_AND_FINISH();
2085 IEM_MC_END();
2086 }
2087 }
2088}
2089
2090
2091/**
2092 * @opmnemonic udvexf30f29
2093 * @opcode 0x29
2094 * @oppfx 0xf3
2095 * @opunused vex.modrm
2096 * @opcpuid avx
2097 * @optest ->
2098 * @opdone
2099 */
2100
2101/**
2102 * @opmnemonic udvexf20f29
2103 * @opcode 0x29
2104 * @oppfx 0xf2
2105 * @opunused vex.modrm
2106 * @opcpuid avx
2107 * @optest ->
2108 * @opdone
2109 */
2110
2111
2112/** Opcode VEX.0F 0x2a - invalid */
2113/** Opcode VEX.66.0F 0x2a - invalid */
2114/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2115FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2116/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2117FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2118
2119
2120/**
2121 * @opcode 0x2b
2122 * @opcodesub !11 mr/reg
2123 * @oppfx none
2124 * @opcpuid avx
2125 * @opgroup og_avx_cachect
2126 * @opxcpttype 1
2127 * @optest op1=1 op2=2 -> op1=2
2128 * @optest op1=0 op2=-42 -> op1=-42
2129 * @note Identical implementation to vmovntpd
2130 */
2131FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2132{
2133 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2134 Assert(pVCpu->iem.s.uVexLength <= 1);
2135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2136 if (IEM_IS_MODRM_MEM_MODE(bRm))
2137 {
2138 /*
2139 * Memory, register.
2140 */
2141 if (pVCpu->iem.s.uVexLength == 0)
2142 {
2143 IEM_MC_BEGIN(0, 2);
2144 IEM_MC_LOCAL(RTUINT128U, uSrc);
2145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2146
2147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2148 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2149 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2150 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2151
2152 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2153 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2154
2155 IEM_MC_ADVANCE_RIP_AND_FINISH();
2156 IEM_MC_END();
2157 }
2158 else
2159 {
2160 IEM_MC_BEGIN(0, 2);
2161 IEM_MC_LOCAL(RTUINT256U, uSrc);
2162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2163
2164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2165 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2166 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2167 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2168
2169 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2170 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2171
2172 IEM_MC_ADVANCE_RIP_AND_FINISH();
2173 IEM_MC_END();
2174 }
2175 }
2176 /* The register, register encoding is invalid. */
2177 else
2178 return IEMOP_RAISE_INVALID_OPCODE();
2179}
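
/*
 * Illustrative sketch (guarded out of the build): the alignment rule behind
 * the *_ALIGN_* stores above. The non-temporal moves are exception type 1,
 * so a misaligned operand raises #GP(0); the predicate below is a
 * hypothetical helper, not a VBox API.
 */
#if 0
# include <stdint.h>
# include <stdbool.h>

/* 16-byte alignment is required for the VEX.128 form, 32-byte for VEX.256. */
static bool sketchVMovNtIsAligned(uint64_t GCPtrEff, unsigned cbOp /* 16 or 32 */)
{
    return (GCPtrEff & (cbOp - 1)) == 0;
}
#endif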
2180
2181/**
2182 * @opcode 0x2b
2183 * @opcodesub !11 mr/reg
2184 * @oppfx 0x66
2185 * @opcpuid avx
2186 * @opgroup og_avx_cachect
2187 * @opxcpttype 1
2188 * @optest op1=1 op2=2 -> op1=2
2189 * @optest op1=0 op2=-42 -> op1=-42
2190 * @note Identical implementation to vmovntps
2191 */
2192FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2193{
2194 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2195 Assert(pVCpu->iem.s.uVexLength <= 1);
2196 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2197 if (IEM_IS_MODRM_MEM_MODE(bRm))
2198 {
2199 /*
2200 * Memory, register.
2201 */
2202 if (pVCpu->iem.s.uVexLength == 0)
2203 {
2204 IEM_MC_BEGIN(0, 2);
2205 IEM_MC_LOCAL(RTUINT128U, uSrc);
2206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2207
2208 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2209 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2210 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2211 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2212
2213 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2214 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2215
2216 IEM_MC_ADVANCE_RIP_AND_FINISH();
2217 IEM_MC_END();
2218 }
2219 else
2220 {
2221 IEM_MC_BEGIN(0, 2);
2222 IEM_MC_LOCAL(RTUINT256U, uSrc);
2223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2224
2225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2226 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2227 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2228 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2229
2230 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2231 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2232
2233 IEM_MC_ADVANCE_RIP_AND_FINISH();
2234 IEM_MC_END();
2235 }
2236 }
2237 /* The register, register encoding is invalid. */
2238 else
2239 return IEMOP_RAISE_INVALID_OPCODE();
2240}
2241
2242/**
2243 * @opmnemonic udvexf30f2b
2244 * @opcode 0x2b
2245 * @oppfx 0xf3
2246 * @opunused vex.modrm
2247 * @opcpuid avx
2248 * @optest ->
2249 * @opdone
2250 */
2251
2252/**
2253 * @opmnemonic udvexf20f2b
2254 * @opcode 0x2b
2255 * @oppfx 0xf2
2256 * @opunused vex.modrm
2257 * @opcpuid avx
2258 * @optest ->
2259 * @opdone
2260 */
2261
2262
2263/* Opcode VEX.0F 0x2c - invalid */
2264/* Opcode VEX.66.0F 0x2c - invalid */
2265/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2266FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2267/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2268FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2269
2270/* Opcode VEX.0F 0x2d - invalid */
2271/* Opcode VEX.66.0F 0x2d - invalid */
2272/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2273FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2274/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2275FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2276
2277
2278/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
2279FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2280{
2281 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2282 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2283 if (IEM_IS_MODRM_REG_MODE(bRm))
2284 {
2285 /*
2286 * Register, register.
2287 */
2288 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2289 IEM_MC_BEGIN(4, 1);
2290 IEM_MC_LOCAL(uint32_t, fEFlags);
2291 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2292 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2293 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2294 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2295 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2296 IEM_MC_PREPARE_AVX_USAGE();
2297 IEM_MC_FETCH_EFLAGS(fEFlags);
2298 IEM_MC_REF_MXCSR(pfMxcsr);
2299 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2300 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2301 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2302 pfMxcsr, pEFlags, puSrc1, puSrc2);
2303 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2304 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2305 } IEM_MC_ELSE() {
2306 IEM_MC_COMMIT_EFLAGS(fEFlags);
2307 } IEM_MC_ENDIF();
2308
2309 IEM_MC_ADVANCE_RIP_AND_FINISH();
2310 IEM_MC_END();
2311 }
2312 else
2313 {
2314 /*
2315 * Register, memory.
2316 */
2317 IEM_MC_BEGIN(4, 3);
2318 IEM_MC_LOCAL(uint32_t, fEFlags);
2319 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2320 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2321 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2322 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2323 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2325
2326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2327 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2328 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2329 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2330
2331 IEM_MC_PREPARE_AVX_USAGE();
2332 IEM_MC_FETCH_EFLAGS(fEFlags);
2333 IEM_MC_REF_MXCSR(pfMxcsr);
2334 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2335 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2336 pfMxcsr, pEFlags, puSrc1, puSrc2);
2337 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2338 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2339 } IEM_MC_ELSE() {
2340 IEM_MC_COMMIT_EFLAGS(fEFlags);
2341 } IEM_MC_ENDIF();
2342
2343 IEM_MC_ADVANCE_RIP_AND_FINISH();
2344 IEM_MC_END();
2345 }
2346}
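
/*
 * Illustrative sketch (guarded out of the build): how an unordered scalar
 * compare maps onto EFLAGS, assuming the usual x86 bit layout (CF=bit 0,
 * PF=bit 2, ZF=bit 6). The sketch* name is hypothetical, not a VBox API.
 */
#if 0
# include <stdint.h>
# include <math.h>

static uint32_t sketchUComIss(float r32Src1, float r32Src2, uint32_t fEFlags)
{
    fEFlags &= ~UINT32_C(0x8d5);        /* clear OF, SF, ZF, AF, PF and CF */
    if (isnan(r32Src1) || isnan(r32Src2))
        fEFlags |= UINT32_C(0x45);      /* unordered: ZF=PF=CF=1 */
    else if (r32Src1 < r32Src2)
        fEFlags |= UINT32_C(0x01);      /* less than: CF=1 */
    else if (r32Src1 == r32Src2)
        fEFlags |= UINT32_C(0x40);      /* equal: ZF=1 */
    /* greater than: ZF=PF=CF=0 */
    return fEFlags;
}
#endif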
2347
2348
2349/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
2350FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2351{
2352 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2354 if (IEM_IS_MODRM_REG_MODE(bRm))
2355 {
2356 /*
2357 * Register, register.
2358 */
2359 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2360 IEM_MC_BEGIN(4, 1);
2361 IEM_MC_LOCAL(uint32_t, fEFlags);
2362 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2363 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2364 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2365 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2366 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2367 IEM_MC_PREPARE_AVX_USAGE();
2368 IEM_MC_FETCH_EFLAGS(fEFlags);
2369 IEM_MC_REF_MXCSR(pfMxcsr);
2370 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2371 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2372 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2373 pfMxcsr, pEFlags, puSrc1, puSrc2);
2374 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2375 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2376 } IEM_MC_ELSE() {
2377 IEM_MC_COMMIT_EFLAGS(fEFlags);
2378 } IEM_MC_ENDIF();
2379
2380 IEM_MC_ADVANCE_RIP_AND_FINISH();
2381 IEM_MC_END();
2382 }
2383 else
2384 {
2385 /*
2386 * Register, memory.
2387 */
2388 IEM_MC_BEGIN(4, 3);
2389 IEM_MC_LOCAL(uint32_t, fEFlags);
2390 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2391 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2392 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2393 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2394 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2396
2397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2398 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2399 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2400 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2401
2402 IEM_MC_PREPARE_AVX_USAGE();
2403 IEM_MC_FETCH_EFLAGS(fEFlags);
2404 IEM_MC_REF_MXCSR(pfMxcsr);
2405 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2406 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2407 pfMxcsr, pEFlags, puSrc1, puSrc2);
2408 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2409 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2410 } IEM_MC_ELSE() {
2411 IEM_MC_COMMIT_EFLAGS(fEFlags);
2412 } IEM_MC_ENDIF();
2413
2414 IEM_MC_ADVANCE_RIP_AND_FINISH();
2415 IEM_MC_END();
2416 }
2417}
2418
2419
2420/* Opcode VEX.F3.0F 0x2e - invalid */
2421/* Opcode VEX.F2.0F 0x2e - invalid */
2422
2423/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
2424FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2425{
2426 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2428 if (IEM_IS_MODRM_REG_MODE(bRm))
2429 {
2430 /*
2431 * Register, register.
2432 */
2433 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2434 IEM_MC_BEGIN(4, 1);
2435 IEM_MC_LOCAL(uint32_t, fEFlags);
2436 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2437 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2438 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2439 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2440 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2441 IEM_MC_PREPARE_AVX_USAGE();
2442 IEM_MC_FETCH_EFLAGS(fEFlags);
2443 IEM_MC_REF_MXCSR(pfMxcsr);
2444 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2445 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2446 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2447 pfMxcsr, pEFlags, puSrc1, puSrc2);
2448 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2449 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2450 } IEM_MC_ELSE() {
2451 IEM_MC_COMMIT_EFLAGS(fEFlags);
2452 } IEM_MC_ENDIF();
2453
2454 IEM_MC_ADVANCE_RIP_AND_FINISH();
2455 IEM_MC_END();
2456 }
2457 else
2458 {
2459 /*
2460 * Register, memory.
2461 */
2462 IEM_MC_BEGIN(4, 3);
2463 IEM_MC_LOCAL(uint32_t, fEFlags);
2464 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2465 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2466 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2467 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2468 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2470
2471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2472 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2473 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2474 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2475
2476 IEM_MC_PREPARE_AVX_USAGE();
2477 IEM_MC_FETCH_EFLAGS(fEFlags);
2478 IEM_MC_REF_MXCSR(pfMxcsr);
2479 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2480 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2481 pfMxcsr, pEFlags, puSrc1, puSrc2);
2482 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2483 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2484 } IEM_MC_ELSE() {
2485 IEM_MC_COMMIT_EFLAGS(fEFlags);
2486 } IEM_MC_ENDIF();
2487
2488 IEM_MC_ADVANCE_RIP_AND_FINISH();
2489 IEM_MC_END();
2490 }
2491}
2492
2493
2494/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
2495FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2496{
2497 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2499 if (IEM_IS_MODRM_REG_MODE(bRm))
2500 {
2501 /*
2502 * Register, register.
2503 */
2504 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2505 IEM_MC_BEGIN(4, 1);
2506 IEM_MC_LOCAL(uint32_t, fEFlags);
2507 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2508 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2509 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2510 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2511 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2512 IEM_MC_PREPARE_AVX_USAGE();
2513 IEM_MC_FETCH_EFLAGS(fEFlags);
2514 IEM_MC_REF_MXCSR(pfMxcsr);
2515 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2516 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2517 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2518 pfMxcsr, pEFlags, puSrc1, puSrc2);
2519 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2520 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2521 } IEM_MC_ELSE() {
2522 IEM_MC_COMMIT_EFLAGS(fEFlags);
2523 } IEM_MC_ENDIF();
2524
2525 IEM_MC_ADVANCE_RIP_AND_FINISH();
2526 IEM_MC_END();
2527 }
2528 else
2529 {
2530 /*
2531 * Register, memory.
2532 */
2533 IEM_MC_BEGIN(4, 3);
2534 IEM_MC_LOCAL(uint32_t, fEFlags);
2535 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2536 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2537 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2538 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2539 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2541
2542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2543 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2544 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2545 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2546
2547 IEM_MC_PREPARE_AVX_USAGE();
2548 IEM_MC_FETCH_EFLAGS(fEFlags);
2549 IEM_MC_REF_MXCSR(pfMxcsr);
2550 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2551 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2552 pfMxcsr, pEFlags, puSrc1, puSrc2);
2553 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2554 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2555 } IEM_MC_ELSE() {
2556 IEM_MC_COMMIT_EFLAGS(fEFlags);
2557 } IEM_MC_ENDIF();
2558
2559 IEM_MC_ADVANCE_RIP_AND_FINISH();
2560 IEM_MC_END();
2561 }
2562}
2563
2564
2565/* Opcode VEX.F3.0F 0x2f - invalid */
2566/* Opcode VEX.F2.0F 0x2f - invalid */
2567
2568/* Opcode VEX.0F 0x30 - invalid */
2569/* Opcode VEX.0F 0x31 - invalid */
2570/* Opcode VEX.0F 0x32 - invalid */
2571/* Opcode VEX.0F 0x33 - invalid */
2572/* Opcode VEX.0F 0x34 - invalid */
2573/* Opcode VEX.0F 0x35 - invalid */
2574/* Opcode VEX.0F 0x36 - invalid */
2575/* Opcode VEX.0F 0x37 - invalid */
2576/* Opcode VEX.0F 0x38 - invalid */
2577/* Opcode VEX.0F 0x39 - invalid */
2578/* Opcode VEX.0F 0x3a - invalid */
2579/* Opcode VEX.0F 0x3b - invalid */
2580/* Opcode VEX.0F 0x3c - invalid */
2581/* Opcode VEX.0F 0x3d - invalid */
2582/* Opcode VEX.0F 0x3e - invalid */
2583/* Opcode VEX.0F 0x3f - invalid */
2584/* Opcode VEX.0F 0x40 - invalid */
2585/* Opcode VEX.0F 0x41 - invalid */
2586/* Opcode VEX.0F 0x42 - invalid */
2587/* Opcode VEX.0F 0x43 - invalid */
2588/* Opcode VEX.0F 0x44 - invalid */
2589/* Opcode VEX.0F 0x45 - invalid */
2590/* Opcode VEX.0F 0x46 - invalid */
2591/* Opcode VEX.0F 0x47 - invalid */
2592/* Opcode VEX.0F 0x48 - invalid */
2593/* Opcode VEX.0F 0x49 - invalid */
2594/* Opcode VEX.0F 0x4a - invalid */
2595/* Opcode VEX.0F 0x4b - invalid */
2596/* Opcode VEX.0F 0x4c - invalid */
2597/* Opcode VEX.0F 0x4d - invalid */
2598/* Opcode VEX.0F 0x4e - invalid */
2599/* Opcode VEX.0F 0x4f - invalid */
2600
2601
2602/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2603FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2604{
2605 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2607 if (IEM_IS_MODRM_REG_MODE(bRm))
2608 {
2609 /*
2610 * Register, register.
2611 */
2612 if (pVCpu->iem.s.uVexLength == 0)
2613 {
2614 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2615 IEM_MC_BEGIN(2, 1);
2616 IEM_MC_LOCAL(uint8_t, u8Dst);
2617 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2618 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2619 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2620 IEM_MC_PREPARE_AVX_USAGE();
2621 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2622 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2623 pu8Dst, puSrc);
2624 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2625 IEM_MC_ADVANCE_RIP_AND_FINISH();
2626 IEM_MC_END();
2627 }
2628 else
2629 {
2630 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2631 IEM_MC_BEGIN(2, 2);
2632 IEM_MC_LOCAL(uint8_t, u8Dst);
2633 IEM_MC_LOCAL(RTUINT256U, uSrc);
2634 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2635 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2636
2637 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2638 IEM_MC_PREPARE_AVX_USAGE();
2639 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2640 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2641 pu8Dst, puSrc);
2642 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2643 IEM_MC_ADVANCE_RIP_AND_FINISH();
2644 IEM_MC_END();
2645 }
2646 }
2647 /* No memory operand. */
2648 else
2649 return IEMOP_RAISE_INVALID_OPCODE();
2650}
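
/*
 * Illustrative sketch (guarded out of the build): what the vmovmskps worker
 * computes - the sign bit of each packed single gathered into the low bits
 * of a general register. The sketch* name is hypothetical, not a VBox API.
 */
#if 0
# include <stdint.h>

/* 4 lanes for the VEX.128 form, 8 lanes for VEX.256. */
static uint8_t sketchVMovMskPs(uint32_t const *pau32Src, unsigned cLanes)
{
    uint8_t bDst = 0;
    for (unsigned i = 0; i < cLanes; i++)
        bDst |= (uint8_t)(((pau32Src[i] >> 31) & 1) << i);
    return bDst;
}
#endif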
2651
2652
2653/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2654FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2655{
2656 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2657 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2658 if (IEM_IS_MODRM_REG_MODE(bRm))
2659 {
2660 /*
2661 * Register, register.
2662 */
2663 if (pVCpu->iem.s.uVexLength == 0)
2664 {
2665 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2666 IEM_MC_BEGIN(2, 1);
2667 IEM_MC_LOCAL(uint8_t, u8Dst);
2668 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2669 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2670 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2671 IEM_MC_PREPARE_AVX_USAGE();
2672 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2673 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2674 pu8Dst, puSrc);
2675 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2676 IEM_MC_ADVANCE_RIP_AND_FINISH();
2677 IEM_MC_END();
2678 }
2679 else
2680 {
2681 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2682 IEM_MC_BEGIN(2, 2);
2683 IEM_MC_LOCAL(uint8_t, u8Dst);
2684 IEM_MC_LOCAL(RTUINT256U, uSrc);
2685 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2686 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2687
2688 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2689 IEM_MC_PREPARE_AVX_USAGE();
2690 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2691 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2692 pu8Dst, puSrc);
2693 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2694 IEM_MC_ADVANCE_RIP_AND_FINISH();
2695 IEM_MC_END();
2696 }
2697 }
2698 /* No memory operand. */
2699 else
2700 return IEMOP_RAISE_INVALID_OPCODE();
2701}
2702
2703
2704/* Opcode VEX.F3.0F 0x50 - invalid */
2705/* Opcode VEX.F2.0F 0x50 - invalid */
2706
2707/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2708FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2709/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2710FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2711/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2712FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2713/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2714FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2715
2716/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2717FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2718/* Opcode VEX.66.0F 0x52 - invalid */
2719/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2720FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2721/* Opcode VEX.F2.0F 0x52 - invalid */
2722
2723/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2724FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2725/* Opcode VEX.66.0F 0x53 - invalid */
2726/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2727FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2728/* Opcode VEX.F2.0F 0x53 - invalid */
2729
2730
2731/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2732FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2733{
2734 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2735 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2736 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2737}
2738
2739
2740/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2741FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2742{
2743 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2744 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2745 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2746}
2747
2748
2749/* Opcode VEX.F3.0F 0x54 - invalid */
2750/* Opcode VEX.F2.0F 0x54 - invalid */
2751
2752
2753/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2754FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2755{
2756 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2757 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2758 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2759}
2760
2761
2762/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2763FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2764{
2765 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2766 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2767 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2768}
2769
2770
2771/* Opcode VEX.F3.0F 0x55 - invalid */
2772/* Opcode VEX.F2.0F 0x55 - invalid */
2773
2774/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2775FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2776{
2777 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2778 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2779 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2780}
2781
2782
2783/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2784FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2785{
2786 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2787 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2788 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2789}
2790
2791
2792/* Opcode VEX.F3.0F 0x56 - invalid */
2793/* Opcode VEX.F2.0F 0x56 - invalid */
2794
2795
2796/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2797FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2798{
2799 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2800 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2801 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2802}
2803
2804
2805/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2806FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2807{
2808 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2809 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2810 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2811}
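
/*
 * Illustrative sketch (guarded out of the build): why vandps/vandpd,
 * vandnps/vandnpd, vorps/vorpd and vxorps/vxorpd can all reuse the integer
 * vpand/vpandn/vpor/vpxor workers above - a bitwise operation is
 * insensitive to the lane type, so one 64-bit chunked loop serves every
 * form. The sketch* name is hypothetical, not a VBox API.
 */
#if 0
# include <stdint.h>

static void sketchBitwiseXor128(uint64_t pau64Dst[2], uint64_t const pau64Src1[2],
                                uint64_t const pau64Src2[2])
{
    pau64Dst[0] = pau64Src1[0] ^ pau64Src2[0];
    pau64Dst[1] = pau64Src1[1] ^ pau64Src2[1];
}
#endif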
2812
2813
2814/* Opcode VEX.F3.0F 0x57 - invalid */
2815/* Opcode VEX.F2.0F 0x57 - invalid */
2816
2817/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2818FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2819/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2820FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2821/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2822FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2823/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2824FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2825
2826/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2827FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2828/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2829FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2830/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2831FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2832/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2833FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2834
2835/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2836FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2837/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2838FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2839/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2840FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2841/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2842FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2843
2844/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2845FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2846/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2847FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2848/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2849FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2850/* Opcode VEX.F2.0F 0x5b - invalid */
2851
2852/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2853FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2854/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2855FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2856/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2857FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2858/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2859FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2860
2861/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2862FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2863/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2864FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2865/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2866FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2867/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2868FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2869
2870/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2871FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2872/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2873FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2874/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2875FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2876/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2877FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2878
2879/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2880FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2881/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2882FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2883/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2884FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2885/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2886FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2887
2888
2889/* Opcode VEX.0F 0x60 - invalid */
2890
2891
2892/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2893FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2894{
2895 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2896 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2897 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2898}
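
/*
 * Illustrative sketch (guarded out of the build): the byte interleave that
 * vpunpcklbw performs on one 128-bit lane; the AVX2 form repeats it
 * independently for each 128-bit lane. The sketch* name is hypothetical.
 */
#if 0
# include <stdint.h>
# include <string.h>

static void sketchPunpcklbw128(uint8_t pabDst[16], uint8_t const pabSrc1[16],
                               uint8_t const pabSrc2[16])
{
    uint8_t abTmp[16];
    for (unsigned i = 0; i < 8; i++)
    {
        abTmp[i * 2]     = pabSrc1[i];  /* even bytes from the first source */
        abTmp[i * 2 + 1] = pabSrc2[i];  /* odd bytes from the second source */
    }
    memcpy(pabDst, abTmp, sizeof(abTmp));
}
#endif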
2899
2900
2901/* Opcode VEX.F3.0F 0x60 - invalid */
2902
2903
2904/* Opcode VEX.0F 0x61 - invalid */
2905
2906
2907/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2908FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2909{
2910 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2911 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2912 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2913}
2914
2915
2916/* Opcode VEX.F3.0F 0x61 - invalid */
2917
2918
2919/* Opcode VEX.0F 0x62 - invalid */
2920
2921/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2922FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2923{
2924 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2925 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2926 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2927}
2928
2929
2930/* Opcode VEX.F3.0F 0x62 - invalid */
2931
2932
2933
2934/* Opcode VEX.0F 0x63 - invalid */
2935
2936
2937/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2938FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2939{
2940 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2941 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2942 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2943}
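
/*
 * Illustrative sketch (guarded out of the build): the signed saturation at
 * the heart of vpacksswb - each int16 lane is clamped to the int8 range
 * before being narrowed. The sketch* name is hypothetical.
 */
#if 0
# include <stdint.h>

static int8_t sketchSatI16ToI8(int16_t i16)
{
    if (i16 > INT8_MAX) return INT8_MAX;
    if (i16 < INT8_MIN) return INT8_MIN;
    return (int8_t)i16;
}
#endif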
2944
2945
2946/* Opcode VEX.F3.0F 0x63 - invalid */
2947
2948/* Opcode VEX.0F 0x64 - invalid */
2949
2950
2951/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2952FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2953{
2954 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2955 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2956 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2957}
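
/*
 * Illustrative sketch (guarded out of the build): vpcmpgtb and friends
 * produce a per-lane mask rather than flags - all ones where the signed
 * compare holds, all zero elsewhere. The sketch* name is hypothetical.
 */
#if 0
# include <stdint.h>

static uint8_t sketchPCmpGtB(int8_t i8Src1, int8_t i8Src2)
{
    return i8Src1 > i8Src2 ? UINT8_C(0xff) : UINT8_C(0x00);
}
#endif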
2958
2959
2960/* Opcode VEX.F3.0F 0x64 - invalid */
2961
2962/* Opcode VEX.0F 0x65 - invalid */
2963
2964
2965/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2966FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2967{
2968 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2969 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
2970 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2971}
2972
2973
2974/* Opcode VEX.F3.0F 0x65 - invalid */
2975
2976/* Opcode VEX.0F 0x66 - invalid */
2977
2978
2979/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
2980FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
2981{
2982 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2983 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
2984 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2985}
2986
2987
2988/* Opcode VEX.F3.0F 0x66 - invalid */
2989
2990/* Opcode VEX.0F 0x67 - invalid */
2991
2992
2993/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
2994FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
2995{
2996 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2997 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
2998 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2999}
3000
3001
3002/* Opcode VEX.F3.0F 0x67 - invalid */
3003
3004
3005///**
3006// * Common worker for SSE2 instructions on the form:
3007// * pxxxx xmm1, xmm2/mem128
3008// *
3009// * The 2nd operand is the second half of a register, which in the memory case
3010// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3011// * where it may read the full 128 bits or only the upper 64 bits.
3012// *
3013// * Exceptions type 4.
3014// */
3015//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3016//{
3017// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3018// if (IEM_IS_MODRM_REG_MODE(bRm))
3019// {
3020// /*
3021// * Register, register.
3022// */
3023// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3024// IEM_MC_BEGIN(2, 0);
3025// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3026// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3027// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3028// IEM_MC_PREPARE_SSE_USAGE();
3029// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3030// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3031// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3032// IEM_MC_ADVANCE_RIP_AND_FINISH();
3033// IEM_MC_END();
3034// }
3035// else
3036// {
3037// /*
3038// * Register, memory.
3039// */
3040// IEM_MC_BEGIN(2, 2);
3041// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3042// IEM_MC_LOCAL(RTUINT128U, uSrc);
3043// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3044// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3045//
3046// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3047// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3048// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3049// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3050//
3051// IEM_MC_PREPARE_SSE_USAGE();
3052// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3053// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3054//
3055// IEM_MC_ADVANCE_RIP_AND_FINISH();
3056// IEM_MC_END();
3057// }
3058// return VINF_SUCCESS;
3059//}
3060
3061
3062/* Opcode VEX.0F 0x68 - invalid */
3063
3064/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3065FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3066{
3067 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3068 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3069 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3070}
3071
3072
3073/* Opcode VEX.F3.0F 0x68 - invalid */
3074
3075
3076/* Opcode VEX.0F 0x69 - invalid */
3077
3078
3079/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3080FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3081{
3082 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3083 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3084 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3085}
3086
3087
3088/* Opcode VEX.F3.0F 0x69 - invalid */
3089
3090
3091/* Opcode VEX.0F 0x6a - invalid */
3092
3093
3094/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3095FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3096{
3097 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3098 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3099 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3100}
3101
3102
3103/* Opcode VEX.F3.0F 0x6a - invalid */
3104
3105
3106/* Opcode VEX.0F 0x6b - invalid */
3107
3108
3109/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3110FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3111{
3112 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3113 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3114 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3115}
3116
3117
3118/* Opcode VEX.F3.0F 0x6b - invalid */
3119
3120
3121/* Opcode VEX.0F 0x6c - invalid */
3122
3123
3124/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3125FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3126{
3127 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3128 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3129 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3130}
3131
3132
3133/* Opcode VEX.F3.0F 0x6c - invalid */
3134/* Opcode VEX.F2.0F 0x6c - invalid */
3135
3136
3137/* Opcode VEX.0F 0x6d - invalid */
3138
3139
3140/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3141FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3142{
3143 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3144 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3145 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3146}
3147
3148
3149/* Opcode VEX.F3.0F 0x6d - invalid */
3150
3151
3152/* Opcode VEX.0F 0x6e - invalid */
3153
3154FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3155{
3156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3157 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3158 {
3159 /**
3160 * @opcode 0x6e
3161 * @opcodesub rex.w=1
3162 * @oppfx 0x66
3163 * @opcpuid avx
3164 * @opgroup og_avx_simdint_datamov
3165 * @opxcpttype 5
3166 * @optest 64-bit / op1=1 op2=2 -> op1=2
3167 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3168 */
3169 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3170 if (IEM_IS_MODRM_REG_MODE(bRm))
3171 {
3172 /* XMM, greg64 */
3173 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3174 IEM_MC_BEGIN(0, 1);
3175 IEM_MC_LOCAL(uint64_t, u64Tmp);
3176
3177 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3178 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3179
3180 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3181 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3182
3183 IEM_MC_ADVANCE_RIP_AND_FINISH();
3184 IEM_MC_END();
3185 }
3186 else
3187 {
3188 /* XMM, [mem64] */
3189 IEM_MC_BEGIN(0, 2);
3190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3191 IEM_MC_LOCAL(uint64_t, u64Tmp);
3192
3193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3194 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3195 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3196 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3197
3198 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3199 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3200
3201 IEM_MC_ADVANCE_RIP_AND_FINISH();
3202 IEM_MC_END();
3203 }
3204 }
3205 else
3206 {
3207 /**
3208 * @opdone
3209 * @opcode 0x6e
3210 * @opcodesub rex.w=0
3211 * @oppfx 0x66
3212 * @opcpuid avx
3213 * @opgroup og_avx_simdint_datamov
3214 * @opxcpttype 5
3215 * @opfunction iemOp_vmovd_q_Vy_Ey
3216 * @optest op1=1 op2=2 -> op1=2
3217 * @optest op1=0 op2=-42 -> op1=-42
3218 */
3219 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3220 if (IEM_IS_MODRM_REG_MODE(bRm))
3221 {
3222 /* XMM, greg32 */
3223 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3224 IEM_MC_BEGIN(0, 1);
3225 IEM_MC_LOCAL(uint32_t, u32Tmp);
3226
3227 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3228 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3229
3230 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3231 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3232
3233 IEM_MC_ADVANCE_RIP_AND_FINISH();
3234 IEM_MC_END();
3235 }
3236 else
3237 {
3238 /* XMM, [mem32] */
3239 IEM_MC_BEGIN(0, 2);
3240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3241 IEM_MC_LOCAL(uint32_t, u32Tmp);
3242
3243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3244 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3245 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3246 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3247
3248 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3249 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3250
3251 IEM_MC_ADVANCE_RIP_AND_FINISH();
3252 IEM_MC_END();
3253 }
3254 }
3255}
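
/*
 * Illustrative sketch (guarded out of the build): the REX.W split above in
 * plain C - vmovd zero-extends a dword, vmovq a qword, and both clear the
 * destination up to VLMAX. The four-qword layout and the sketch* name are
 * hypothetical, not VBox types.
 */
#if 0
# include <stdint.h>

static void sketchVMovDLoad(uint64_t pau64YmmDst[4], uint32_t u32Src)
{
    pau64YmmDst[0] = u32Src;    /* zero-extended dword */
    pau64YmmDst[1] = 0;
    pau64YmmDst[2] = 0;
    pau64YmmDst[3] = 0;
}
#endif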
3256
3257
3258/* Opcode VEX.F3.0F 0x6e - invalid */
3259
3260
3261/* Opcode VEX.0F 0x6f - invalid */
3262
3263/**
3264 * @opcode 0x6f
3265 * @oppfx 0x66
3266 * @opcpuid avx
3267 * @opgroup og_avx_simdint_datamove
3268 * @opxcpttype 1
3269 * @optest op1=1 op2=2 -> op1=2
3270 * @optest op1=0 op2=-42 -> op1=-42
3271 */
3272FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3273{
3274 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3275 Assert(pVCpu->iem.s.uVexLength <= 1);
3276 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3277 if (IEM_IS_MODRM_REG_MODE(bRm))
3278 {
3279 /*
3280 * Register, register.
3281 */
3282 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3283 IEM_MC_BEGIN(0, 0);
3284
3285 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3286 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3287 if (pVCpu->iem.s.uVexLength == 0)
3288 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3289 IEM_GET_MODRM_RM(pVCpu, bRm));
3290 else
3291 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3292 IEM_GET_MODRM_RM(pVCpu, bRm));
3293 IEM_MC_ADVANCE_RIP_AND_FINISH();
3294 IEM_MC_END();
3295 }
3296 else if (pVCpu->iem.s.uVexLength == 0)
3297 {
3298 /*
3299 * Register, memory128.
3300 */
3301 IEM_MC_BEGIN(0, 2);
3302 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3304
3305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3306 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3307 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3308 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3309
3310 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3311 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3312
3313 IEM_MC_ADVANCE_RIP_AND_FINISH();
3314 IEM_MC_END();
3315 }
3316 else
3317 {
3318 /*
3319 * Register, memory256.
3320 */
3321 IEM_MC_BEGIN(0, 2);
3322 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3324
3325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3326 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3327 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3328 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3329
3330 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3331 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3332
3333 IEM_MC_ADVANCE_RIP_AND_FINISH();
3334 IEM_MC_END();
3335 }
3336}
3337
3338/**
3339 * @opcode 0x6f
3340 * @oppfx 0xf3
3341 * @opcpuid avx
3342 * @opgroup og_avx_simdint_datamove
3343 * @opxcpttype 4UA
3344 * @optest op1=1 op2=2 -> op1=2
3345 * @optest op1=0 op2=-42 -> op1=-42
3346 */
3347FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3348{
3349 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3350 Assert(pVCpu->iem.s.uVexLength <= 1);
3351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3352 if (IEM_IS_MODRM_REG_MODE(bRm))
3353 {
3354 /*
3355 * Register, register.
3356 */
3357 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3358 IEM_MC_BEGIN(0, 0);
3359
3360 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3361 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3362 if (pVCpu->iem.s.uVexLength == 0)
3363 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3364 IEM_GET_MODRM_RM(pVCpu, bRm));
3365 else
3366 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3367 IEM_GET_MODRM_RM(pVCpu, bRm));
3368 IEM_MC_ADVANCE_RIP_AND_FINISH();
3369 IEM_MC_END();
3370 }
3371 else if (pVCpu->iem.s.uVexLength == 0)
3372 {
3373 /*
3374 * Register, memory128.
3375 */
3376 IEM_MC_BEGIN(0, 2);
3377 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3379
3380 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3381 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3382 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3383 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3384
3385 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3386 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3387
3388 IEM_MC_ADVANCE_RIP_AND_FINISH();
3389 IEM_MC_END();
3390 }
3391 else
3392 {
3393 /*
3394 * Register, memory256.
3395 */
3396 IEM_MC_BEGIN(0, 2);
3397 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3399
3400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3401 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3402 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3403 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3404
3405 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3406 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3407
3408 IEM_MC_ADVANCE_RIP_AND_FINISH();
3409 IEM_MC_END();
3410 }
3411}
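
/*
 * Illustrative sketch (guarded out of the build): the only operational
 * difference between the vmovdqa and vmovdqu loads above is that the
 * aligned form checks the effective address first and raises #GP(0) on
 * failure, while the unaligned form never does. The predicate below is a
 * hypothetical helper, not a VBox API.
 */
#if 0
# include <stdint.h>
# include <stdbool.h>

static bool sketchVMovDqaIsAligned(uint64_t GCPtrEff, unsigned cbOp /* 16 or 32 */)
{
    return (GCPtrEff & (cbOp - 1)) == 0;
}
#endif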
3412
3413
3414/* Opcode VEX.0F 0x70 - invalid */
3415
3416
3417/**
3418 * Common worker for AVX/AVX2 instructions on the forms:
3419 * - vpxxx xmm0, xmm2/mem128, imm8
3420 * - vpxxx ymm0, ymm2/mem256, imm8
3421 *
3422 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3423 */
3424FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3425{
3426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3427 if (IEM_IS_MODRM_REG_MODE(bRm))
3428 {
3429 /*
3430 * Register, register.
3431 */
3432 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3433 if (pVCpu->iem.s.uVexLength)
3434 {
3435 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3436 IEM_MC_BEGIN(3, 2);
3437 IEM_MC_LOCAL(RTUINT256U, uDst);
3438 IEM_MC_LOCAL(RTUINT256U, uSrc);
3439 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3440 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3441 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3442 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3443 IEM_MC_PREPARE_AVX_USAGE();
3444 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3445 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3446 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3447 IEM_MC_ADVANCE_RIP_AND_FINISH();
3448 IEM_MC_END();
3449 }
3450 else
3451 {
3452 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3453 IEM_MC_BEGIN(3, 0);
3454 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3455 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3456 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3457 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3458 IEM_MC_PREPARE_AVX_USAGE();
3459 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3460 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3461 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3462 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3463 IEM_MC_ADVANCE_RIP_AND_FINISH();
3464 IEM_MC_END();
3465 }
3466 }
3467 else
3468 {
3469 /*
3470 * Register, memory.
3471 */
3472 if (pVCpu->iem.s.uVexLength)
3473 {
3474 IEM_MC_BEGIN(3, 3);
3475 IEM_MC_LOCAL(RTUINT256U, uDst);
3476 IEM_MC_LOCAL(RTUINT256U, uSrc);
3477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3478 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3479 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3480
3481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3482 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3483 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3484 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3485 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3486 IEM_MC_PREPARE_AVX_USAGE();
3487
3488 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3489 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3490 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3491
3492 IEM_MC_ADVANCE_RIP_AND_FINISH();
3493 IEM_MC_END();
3494 }
3495 else
3496 {
3497 IEM_MC_BEGIN(3, 1);
3498 IEM_MC_LOCAL(RTUINT128U, uSrc);
3499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3500 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3501 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3502
3503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3504 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3505 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3506 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3507 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3508 IEM_MC_PREPARE_AVX_USAGE();
3509
3510 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3511 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3512 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3513 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3514
3515 IEM_MC_ADVANCE_RIP_AND_FINISH();
3516 IEM_MC_END();
3517 }
3518 }
3519}
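
/* Decoder-ordering note (added remark, not from the upstream comments): in the
 * memory forms above the imm8 is fetched only after IEM_MC_CALC_RM_EFF_ADDR(),
 * since the immediate byte follows the ModRM displacement in the instruction
 * stream; the trailing '1' passed to IEM_MC_CALC_RM_EFF_ADDR() indicates that
 * one more opcode byte (the imm8) is still outstanding. */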
3520
3521
3522/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3523FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3524{
3525 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3526 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3527 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3529}
3530
3531
3532/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3533FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3534{
3535 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3536 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3537 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3539}
3540
3541
3542/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3543FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3544{
3545 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3546 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3547 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3548}
3549
3550
3551/* Opcode VEX.0F 0x71 11/2 - invalid. */
3552/** Opcode VEX.66.0F 0x71 11/2. */
3553FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3554
3555/* Opcode VEX.0F 0x71 11/4 - invalid */
3556/** Opcode VEX.66.0F 0x71 11/4. */
3557FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3558
3559/* Opcode VEX.0F 0x71 11/6 - invalid */
3560/** Opcode VEX.66.0F 0x71 11/6. */
3561FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3562
3563
3564/**
3565 * VEX Group 12 jump table for register variant.
3566 */
3567IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3568{
3569 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3570 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3571 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3572 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3573 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3574 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3575 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3576 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3577};
3578AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3579
3580
3581/** Opcode VEX.0F 0x71. */
3582FNIEMOP_DEF(iemOp_VGrp12)
3583{
3584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3585 if (IEM_IS_MODRM_REG_MODE(bRm))
3586 /* register, register */
3587 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3588 + pVCpu->iem.s.idxPrefix], bRm);
3589 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3590}
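
/* Illustrative lookup (added sketch, not upstream code): the group 12-15
 * register-variant tables are indexed as ModRM.reg * 4 + idxPrefix, the four
 * prefix columns being none/066h/0f3h/0f2h. E.g. VEX.66.0F 0x71 /2 (vpsrlw)
 * would resolve as:
 */
//  uint8_t const iReg      = 2;   /* ModRM.reg field, i.e. /2.  */
//  uint8_t const idxPrefix = 1;   /* the 0x66 prefix column.    */
//  PFNIEMOPRM const pfnOp  = g_apfnVexGroup12RegReg[iReg * 4 + idxPrefix];
//  /* -> iemOp_VGrp12_vpsrlw_Hx_Ux_Ib */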
3591
3592
3593/* Opcode VEX.0F 0x72 11/2 - invalid. */
3594/** Opcode VEX.66.0F 0x72 11/2. */
3595FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3596
3597/* Opcode VEX.0F 0x72 11/4 - invalid. */
3598/** Opcode VEX.66.0F 0x72 11/4. */
3599FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3600
3601/* Opcode VEX.0F 0x72 11/6 - invalid. */
3602/** Opcode VEX.66.0F 0x72 11/6. */
3603FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3604
3605
3606/**
3607 * VEX Group 13 jump table for register variant.
3608 */
3609IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3610{
3611 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3612 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3613 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3614 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3615 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3616 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3617 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3618 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3619};
3620AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3621
3622/** Opcode VEX.0F 0x72. */
3623FNIEMOP_DEF(iemOp_VGrp13)
3624{
3625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3626 if (IEM_IS_MODRM_REG_MODE(bRm))
3627 /* register, register */
3628 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3629 + pVCpu->iem.s.idxPrefix], bRm);
3630 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3631}
3632
3633
3634/* Opcode VEX.0F 0x73 11/2 - invalid. */
3635/** Opcode VEX.66.0F 0x73 11/2. */
3636FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3637
3638/** Opcode VEX.66.0F 0x73 11/3. */
3639FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3640
3641/* Opcode VEX.0F 0x73 11/6 - invalid. */
3642/** Opcode VEX.66.0F 0x73 11/6. */
3643FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3644
3645/** Opcode VEX.66.0F 0x73 11/7. */
3646FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3647
3648/**
3649 * VEX Group 14 jump table for register variant.
3650 */
3651IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3652{
3653 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3654 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3655 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3656 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3657 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3658 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3659 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3660 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3661};
3662AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3663
3664
3665/** Opcode VEX.0F 0x73. */
3666FNIEMOP_DEF(iemOp_VGrp14)
3667{
3668 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3669 if (IEM_IS_MODRM_REG_MODE(bRm))
3670 /* register, register */
3671 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3672 + pVCpu->iem.s.idxPrefix], bRm);
3673 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3674}
3675
3676
3677/* Opcode VEX.0F 0x74 - invalid */
3678
3679
3680/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3681FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3682{
3683 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3684 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
3685 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3686}
3687
3688/* Opcode VEX.F3.0F 0x74 - invalid */
3689/* Opcode VEX.F2.0F 0x74 - invalid */
3690
3691
3692/* Opcode VEX.0F 0x75 - invalid */
3693
3694
3695/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3696FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3697{
3698 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3699 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3700 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3701}
3702
3703
3704/* Opcode VEX.F3.0F 0x75 - invalid */
3705/* Opcode VEX.F2.0F 0x75 - invalid */
3706
3707
3708/* Opcode VEX.0F 0x76 - invalid */
3709
3710
3711/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3712FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3713{
3714 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3715 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3716 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3717}
3718
3719
3720/* Opcode VEX.F3.0F 0x76 - invalid */
3721/* Opcode VEX.F2.0F 0x76 - invalid */
3722
3723
3724/** Opcode VEX.0F 0x77 - vzeroupper / vzeroall */
3725FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
3726{
3727 Assert(pVCpu->iem.s.uVexLength <= 1);
3728 if (pVCpu->iem.s.uVexLength == 0)
3729 {
3730 /*
3731 * 128-bit: vzeroupper
3732 */
3733 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
3734 IEM_MC_BEGIN(0, 0);
3735
3736 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3737 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3738 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3739
3740 IEM_MC_CLEAR_YREG_128_UP(0);
3741 IEM_MC_CLEAR_YREG_128_UP(1);
3742 IEM_MC_CLEAR_YREG_128_UP(2);
3743 IEM_MC_CLEAR_YREG_128_UP(3);
3744 IEM_MC_CLEAR_YREG_128_UP(4);
3745 IEM_MC_CLEAR_YREG_128_UP(5);
3746 IEM_MC_CLEAR_YREG_128_UP(6);
3747 IEM_MC_CLEAR_YREG_128_UP(7);
3748
3749 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
3750 {
3751 IEM_MC_CLEAR_YREG_128_UP( 8);
3752 IEM_MC_CLEAR_YREG_128_UP( 9);
3753 IEM_MC_CLEAR_YREG_128_UP(10);
3754 IEM_MC_CLEAR_YREG_128_UP(11);
3755 IEM_MC_CLEAR_YREG_128_UP(12);
3756 IEM_MC_CLEAR_YREG_128_UP(13);
3757 IEM_MC_CLEAR_YREG_128_UP(14);
3758 IEM_MC_CLEAR_YREG_128_UP(15);
3759 }
3760
3761 IEM_MC_ADVANCE_RIP_AND_FINISH();
3762 IEM_MC_END();
3763 }
3764 else
3765 {
3766 /*
3767 * 256-bit: vzeroall
3768 */
3769 IEMOP_MNEMONIC(vzeroall, "vzeroall");
3770 IEM_MC_BEGIN(0, 1);
3771 IEM_MC_LOCAL(uint32_t, uZero);
3772
3773 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3774 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3775 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3776
3777 IEM_MC_ASSIGN(uZero, 0);
3778 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
3779 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
3780 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
3781 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
3782 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
3783 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
3784 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
3785 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
3786
3787 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
3788 {
3789 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
3790 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
3791 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
3792 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
3793 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
3794 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
3795 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
3796 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
3797 }
3798
3799 IEM_MC_ADVANCE_RIP_AND_FINISH();
3800 IEM_MC_END();
3801 }
3802}
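
/* Rough scalar sketch of the effect implemented above (illustrative only;
 * f64BitMode, fVexL, cRegs and aYmm are made-up names): vzeroupper clears
 * bits 255:128 of each YMM register, vzeroall clears all 256 bits, and
 * YMM8-YMM15 only exist in 64-bit mode.
 */
//  unsigned const cRegs = f64BitMode ? 16 : 8;
//  for (unsigned i = 0; i < cRegs; i++)
//  {
//      aYmm[i].au64[2] = 0;            /* always drop the upper 128 bits   */
//      aYmm[i].au64[3] = 0;
//      if (fVexL)                      /* vzeroall clears the lower half too */
//          aYmm[i].au64[0] = aYmm[i].au64[1] = 0;
//  }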
3803
3804
3805/* Opcode VEX.66.0F 0x77 - invalid */
3806/* Opcode VEX.F3.0F 0x77 - invalid */
3807/* Opcode VEX.F2.0F 0x77 - invalid */
3808
3809/* Opcode VEX.0F 0x78 - invalid */
3810/* Opcode VEX.66.0F 0x78 - invalid */
3811/* Opcode VEX.F3.0F 0x78 - invalid */
3812/* Opcode VEX.F2.0F 0x78 - invalid */
3813
3814/* Opcode VEX.0F 0x79 - invalid */
3815/* Opcode VEX.66.0F 0x79 - invalid */
3816/* Opcode VEX.F3.0F 0x79 - invalid */
3817/* Opcode VEX.F2.0F 0x79 - invalid */
3818
3819/* Opcode VEX.0F 0x7a - invalid */
3820/* Opcode VEX.66.0F 0x7a - invalid */
3821/* Opcode VEX.F3.0F 0x7a - invalid */
3822/* Opcode VEX.F2.0F 0x7a - invalid */
3823
3824/* Opcode VEX.0F 0x7b - invalid */
3825/* Opcode VEX.66.0F 0x7b - invalid */
3826/* Opcode VEX.F3.0F 0x7b - invalid */
3827/* Opcode VEX.F2.0F 0x7b - invalid */
3828
3829/* Opcode VEX.0F 0x7c - invalid */
3830/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
3831FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3832/* Opcode VEX.F3.0F 0x7c - invalid */
3833/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
3834FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3835
3836/* Opcode VEX.0F 0x7d - invalid */
3837/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
3838FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3839/* Opcode VEX.F3.0F 0x7d - invalid */
3840/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
3841FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3842
3843
3844/* Opcode VEX.0F 0x7e - invalid */
3845
3846FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3847{
3848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3849 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3850 {
3851 /**
3852 * @opcode 0x7e
3853 * @opcodesub rex.w=1
3854 * @oppfx 0x66
3855 * @opcpuid avx
3856 * @opgroup og_avx_simdint_datamov
3857 * @opxcpttype 5
3858 * @optest 64-bit / op1=1 op2=2 -> op1=2
3859 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3860 */
3861 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3862 if (IEM_IS_MODRM_REG_MODE(bRm))
3863 {
3864 /* greg64, XMM */
3865 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3866 IEM_MC_BEGIN(0, 1);
3867 IEM_MC_LOCAL(uint64_t, u64Tmp);
3868
3869 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3870 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3871
3872 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3873 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
3874
3875 IEM_MC_ADVANCE_RIP_AND_FINISH();
3876 IEM_MC_END();
3877 }
3878 else
3879 {
3880 /* [mem64], XMM */
3881 IEM_MC_BEGIN(0, 2);
3882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3883 IEM_MC_LOCAL(uint64_t, u64Tmp);
3884
3885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3886 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3887 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3888 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3889
3890 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3891 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3892
3893 IEM_MC_ADVANCE_RIP_AND_FINISH();
3894 IEM_MC_END();
3895 }
3896 }
3897 else
3898 {
3899 /**
3900 * @opdone
3901 * @opcode 0x7e
3902 * @opcodesub rex.w=0
3903 * @oppfx 0x66
3904 * @opcpuid avx
3905 * @opgroup og_avx_simdint_datamov
3906 * @opxcpttype 5
3907 * @opfunction iemOp_vmovd_q_Ey_Vy
3908 * @optest op1=1 op2=2 -> op1=2
3909 * @optest op1=0 op2=-42 -> op1=-42
3910 */
3911 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3912 if (IEM_IS_MODRM_REG_MODE(bRm))
3913 {
3914 /* greg32, XMM */
3915 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3916 IEM_MC_BEGIN(0, 1);
3917 IEM_MC_LOCAL(uint32_t, u32Tmp);
3918
3919 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3920 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3921
3922 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3923 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
3924
3925 IEM_MC_ADVANCE_RIP_AND_FINISH();
3926 IEM_MC_END();
3927 }
3928 else
3929 {
3930 /* [mem32], XMM */
3931 IEM_MC_BEGIN(0, 2);
3932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3933 IEM_MC_LOCAL(uint32_t, u32Tmp);
3934
3935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3936 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3937 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3938 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3939
3940 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3941 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3942
3943 IEM_MC_ADVANCE_RIP_AND_FINISH();
3944 IEM_MC_END();
3945 }
3946 }
3947}
3948
3949
3950/**
3951 * @opcode 0x7e
3952 * @oppfx 0xf3
3953 * @opcpuid avx
3954 * @opgroup og_avx_pcksclr_datamove
3955 * @opxcpttype none
3956 * @optest op1=1 op2=2 -> op1=2
3957 * @optest op1=0 op2=-42 -> op1=-42
3958 */
3959FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
3960{
3961 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3962 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3963 if (IEM_IS_MODRM_REG_MODE(bRm))
3964 {
3965 /*
3966 * Register, register.
3967 */
3968 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3969 IEM_MC_BEGIN(0, 0);
3970
3971 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3972 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3973
3974 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3975 IEM_GET_MODRM_RM(pVCpu, bRm));
3976 IEM_MC_ADVANCE_RIP_AND_FINISH();
3977 IEM_MC_END();
3978 }
3979 else
3980 {
3981 /*
3982 * Memory, register.
3983 */
3984 IEM_MC_BEGIN(0, 2);
3985 IEM_MC_LOCAL(uint64_t, uSrc);
3986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3987
3988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3989 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3990 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3991 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3992
3993 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3994 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3995
3996 IEM_MC_ADVANCE_RIP_AND_FINISH();
3997 IEM_MC_END();
3998 }
4000}


4001/* Opcode VEX.F2.0F 0x7e - invalid */
4002
4003
4004/* Opcode VEX.0F 0x7f - invalid */
4005
4006/**
4007 * @opcode 0x7f
4008 * @oppfx 0x66
4009 * @opcpuid avx
4010 * @opgroup og_avx_simdint_datamove
4011 * @opxcpttype 1
4012 * @optest op1=1 op2=2 -> op1=2
4013 * @optest op1=0 op2=-42 -> op1=-42
4014 */
4015FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4016{
4017 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4018 Assert(pVCpu->iem.s.uVexLength <= 1);
4019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4020 if (IEM_IS_MODRM_REG_MODE(bRm))
4021 {
4022 /*
4023 * Register, register.
4024 */
4025 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4026 IEM_MC_BEGIN(0, 0);
4027
4028 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4029 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4030 if (pVCpu->iem.s.uVexLength == 0)
4031 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4032 IEM_GET_MODRM_REG(pVCpu, bRm));
4033 else
4034 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4035 IEM_GET_MODRM_REG(pVCpu, bRm));
4036 IEM_MC_ADVANCE_RIP_AND_FINISH();
4037 IEM_MC_END();
4038 }
4039 else if (pVCpu->iem.s.uVexLength == 0)
4040 {
4041 /*
4042 * Register, memory128.
4043 */
4044 IEM_MC_BEGIN(0, 2);
4045 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4047
4048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4049 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4050 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4051 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4052
4053 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4054 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4055
4056 IEM_MC_ADVANCE_RIP_AND_FINISH();
4057 IEM_MC_END();
4058 }
4059 else
4060 {
4061 /*
4062 * Register, memory256.
4063 */
4064 IEM_MC_BEGIN(0, 2);
4065 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4067
4068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4069 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4070 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4071 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4072
4073 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4074 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4075
4076 IEM_MC_ADVANCE_RIP_AND_FINISH();
4077 IEM_MC_END();
4078 }
4079}
4080
4081
4082/**
4083 * @opcode 0x7f
4084 * @oppfx 0xf3
4085 * @opcpuid avx
4086 * @opgroup og_avx_simdint_datamove
4087 * @opxcpttype 4UA
4088 * @optest op1=1 op2=2 -> op1=2
4089 * @optest op1=0 op2=-42 -> op1=-42
4090 */
4091FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4092{
4093 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4094 Assert(pVCpu->iem.s.uVexLength <= 1);
4095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4096 if (IEM_IS_MODRM_REG_MODE(bRm))
4097 {
4098 /*
4099 * Register, register.
4100 */
4101 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4102 IEM_MC_BEGIN(0, 0);
4103
4104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4105 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4106 if (pVCpu->iem.s.uVexLength == 0)
4107 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4108 IEM_GET_MODRM_REG(pVCpu, bRm));
4109 else
4110 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4111 IEM_GET_MODRM_REG(pVCpu, bRm));
4112 IEM_MC_ADVANCE_RIP_AND_FINISH();
4113 IEM_MC_END();
4114 }
4115 else if (pVCpu->iem.s.uVexLength == 0)
4116 {
4117 /*
4118 * Register, memory128.
4119 */
4120 IEM_MC_BEGIN(0, 2);
4121 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4123
4124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4125 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4126 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4127 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4128
4129 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4130 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4131
4132 IEM_MC_ADVANCE_RIP_AND_FINISH();
4133 IEM_MC_END();
4134 }
4135 else
4136 {
4137 /*
4138 * Register, memory256.
4139 */
4140 IEM_MC_BEGIN(0, 2);
4141 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4143
4144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4145 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4146 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4147 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4148
4149 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4150 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4151
4152 IEM_MC_ADVANCE_RIP_AND_FINISH();
4153 IEM_MC_END();
4154 }
4155}
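
/* Added remark (not upstream): note that vmovdqu above deliberately uses the
 * unaligned IEM_MC_STORE_MEM_U128/U256 variants, whereas vmovdqa uses the
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE / IEM_MC_STORE_MEM_U256_ALIGN_AVX ones that
 * enforce the architectural alignment check on the memory operand. */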
4156
4157/* Opcode VEX.F2.0F 0x7f - invalid */
4158
4159
4160/* Opcode VEX.0F 0x80 - invalid */
4161/* Opcode VEX.0F 0x81 - invalid */
4162/* Opcode VEX.0F 0x82 - invalid */
4163/* Opcode VEX.0F 0x83 - invalid */
4164/* Opcode VEX.0F 0x84 - invalid */
4165/* Opcode VEX.0F 0x85 - invalid */
4166/* Opcode VEX.0F 0x86 - invalid */
4167/* Opcode VEX.0F 0x87 - invalid */
4168/* Opcode VEX.0F 0x88 - invalid */
4169/* Opcode VEX.0F 0x89 - invalid */
4170/* Opcode VEX.0F 0x8a - invalid */
4171/* Opcode VEX.0F 0x8b - invalid */
4172/* Opcode VEX.0F 0x8c - invalid */
4173/* Opcode VEX.0F 0x8d - invalid */
4174/* Opcode VEX.0F 0x8e - invalid */
4175/* Opcode VEX.0F 0x8f - invalid */
4176/* Opcode VEX.0F 0x90 - invalid */
4177/* Opcode VEX.0F 0x91 - invalid */
4178/* Opcode VEX.0F 0x92 - invalid */
4179/* Opcode VEX.0F 0x93 - invalid */
4180/* Opcode VEX.0F 0x94 - invalid */
4181/* Opcode VEX.0F 0x95 - invalid */
4182/* Opcode VEX.0F 0x96 - invalid */
4183/* Opcode VEX.0F 0x97 - invalid */
4184/* Opcode VEX.0F 0x98 - invalid */
4185/* Opcode VEX.0F 0x99 - invalid */
4186/* Opcode VEX.0F 0x9a - invalid */
4187/* Opcode VEX.0F 0x9b - invalid */
4188/* Opcode VEX.0F 0x9c - invalid */
4189/* Opcode VEX.0F 0x9d - invalid */
4190/* Opcode VEX.0F 0x9e - invalid */
4191/* Opcode VEX.0F 0x9f - invalid */
4192/* Opcode VEX.0F 0xa0 - invalid */
4193/* Opcode VEX.0F 0xa1 - invalid */
4194/* Opcode VEX.0F 0xa2 - invalid */
4195/* Opcode VEX.0F 0xa3 - invalid */
4196/* Opcode VEX.0F 0xa4 - invalid */
4197/* Opcode VEX.0F 0xa5 - invalid */
4198/* Opcode VEX.0F 0xa6 - invalid */
4199/* Opcode VEX.0F 0xa7 - invalid */
4200/* Opcode VEX.0F 0xa8 - invalid */
4201/* Opcode VEX.0F 0xa9 - invalid */
4202/* Opcode VEX.0F 0xaa - invalid */
4203/* Opcode VEX.0F 0xab - invalid */
4204/* Opcode VEX.0F 0xac - invalid */
4205/* Opcode VEX.0F 0xad - invalid */
4206
4207
4208/* Opcode VEX.0F 0xae mem/0 - invalid. */
4209/* Opcode VEX.0F 0xae mem/1 - invalid. */
4210
4211/**
4212 * @ opmaps grp15
4213 * @ opcode !11/2
4214 * @ oppfx none
4215 * @ opcpuid sse
4216 * @ opgroup og_sse_mxcsrsm
4217 * @ opxcpttype 5
4218 * @ optest op1=0 -> mxcsr=0
4219 * @ optest op1=0x2083 -> mxcsr=0x2083
4220 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4221 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4222 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4223 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4224 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4225 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4226 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4227 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4228 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4229 */
4230FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4231//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4232//{
4233// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4234// IEM_MC_BEGIN(2, 0);
4235// IEM_MC_ARG(uint8_t, iEffSeg, 0);
4236// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4237// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4238// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4239// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4240// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4241// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4242// IEM_MC_END();
4243// return VINF_SUCCESS;
4244//}
4245
4246
4247/**
4248 * @opmaps vexgrp15
4249 * @opcode !11/3
4250 * @oppfx none
4251 * @opcpuid avx
4252 * @opgroup og_avx_mxcsrsm
4253 * @opxcpttype 5
4254 * @optest mxcsr=0 -> op1=0
4255 * @optest mxcsr=0x2083 -> op1=0x2083
4256 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4257 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4258 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4259 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4260 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4261 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4262 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4263 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4264 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4265 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4266 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4267 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4268 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4269 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4270 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4271 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4272 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4273 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4274 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4275 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4276 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4277 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4278 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4279 * -> value.xcpt=0x6
4280 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4281 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4282 * APMv4 rev 3.17 page 509.
4283 * @todo Test this instruction on AMD Ryzen.
4284 */
4285FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4286{
4287 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4288 IEM_MC_BEGIN(2, 0);
4289 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4290 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4292 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4293 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4294 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4295 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4296 IEM_MC_END();
4297}
4298
4299/* Opcode VEX.0F 0xae mem/4 - invalid. */
4300/* Opcode VEX.0F 0xae mem/5 - invalid. */
4301/* Opcode VEX.0F 0xae mem/6 - invalid. */
4302/* Opcode VEX.0F 0xae mem/7 - invalid. */
4303
4304/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4305/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4306/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4307/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4308/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4309/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4310/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4311/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4312
4313/**
4314 * VEX Group 15 jump table for memory variant.
4315 */
4316IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4317{ /* pfx: none, 066h, 0f3h, 0f2h */
4318 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4319 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4320 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4321 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4322 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4323 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4324 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4325 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4326};
4327AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4328
4329
4330/** Opcode VEX.0F 0xae. */
4331FNIEMOP_DEF(iemOp_VGrp15)
4332{
4333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4334 if (IEM_IS_MODRM_REG_MODE(bRm))
4335 /* register, register */
4336 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4337
4338 /* memory, register */
4339 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4340 + pVCpu->iem.s.idxPrefix], bRm);
4341}
4342
4343
4344/* Opcode VEX.0F 0xaf - invalid. */
4345
4346/* Opcode VEX.0F 0xb0 - invalid. */
4347/* Opcode VEX.0F 0xb1 - invalid. */
4348/* Opcode VEX.0F 0xb2 - invalid. */
4350/* Opcode VEX.0F 0xb3 - invalid. */
4351/* Opcode VEX.0F 0xb4 - invalid. */
4352/* Opcode VEX.0F 0xb5 - invalid. */
4353/* Opcode VEX.0F 0xb6 - invalid. */
4354/* Opcode VEX.0F 0xb7 - invalid. */
4355/* Opcode VEX.0F 0xb8 - invalid. */
4356/* Opcode VEX.0F 0xb9 - invalid. */
4357/* Opcode VEX.0F 0xba - invalid. */
4358/* Opcode VEX.0F 0xbb - invalid. */
4359/* Opcode VEX.0F 0xbc - invalid. */
4360/* Opcode VEX.0F 0xbd - invalid. */
4361/* Opcode VEX.0F 0xbe - invalid. */
4362/* Opcode VEX.0F 0xbf - invalid. */
4363
4364/* Opcode VEX.0F 0xc0 - invalid. */
4365/* Opcode VEX.66.0F 0xc0 - invalid. */
4366/* Opcode VEX.F3.0F 0xc0 - invalid. */
4367/* Opcode VEX.F2.0F 0xc0 - invalid. */
4368
4369/* Opcode VEX.0F 0xc1 - invalid. */
4370/* Opcode VEX.66.0F 0xc1 - invalid. */
4371/* Opcode VEX.F3.0F 0xc1 - invalid. */
4372/* Opcode VEX.F2.0F 0xc1 - invalid. */
4373
4374/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4375FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4376/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4377FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4378/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4379FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4380/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4381FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4382
4383/* Opcode VEX.0F 0xc3 - invalid */
4384/* Opcode VEX.66.0F 0xc3 - invalid */
4385/* Opcode VEX.F3.0F 0xc3 - invalid */
4386/* Opcode VEX.F2.0F 0xc3 - invalid */
4387
4388/* Opcode VEX.0F 0xc4 - invalid */
4389
4390
4391/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4392FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4393{
4394 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4396 if (IEM_IS_MODRM_REG_MODE(bRm))
4397 {
4398 /*
4399 * Register, register.
4400 */
4401 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4402 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4403 IEM_MC_BEGIN(4, 0);
4404 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4405 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4406 IEM_MC_ARG(uint16_t, u16Src, 2);
4407 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4408 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4409 IEM_MC_PREPARE_AVX_USAGE();
4410 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4411 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4412 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4413 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4414 puDst, puSrc, u16Src, bImmArg);
4415 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4416 IEM_MC_ADVANCE_RIP_AND_FINISH();
4417 IEM_MC_END();
4418 }
4419 else
4420 {
4421 /*
4422 * Register, memory.
4423 */
4424 IEM_MC_BEGIN(4, 1);
4425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4426 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4427 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4428 IEM_MC_ARG(uint16_t, u16Src, 2);
4429
4430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4431 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4432 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4433 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4434 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4435 IEM_MC_PREPARE_AVX_USAGE();
4436
4437 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4438 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4439 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4440 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4441 puDst, puSrc, u16Src, bImmArg);
4442 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4443
4444 IEM_MC_ADVANCE_RIP_AND_FINISH();
4445 IEM_MC_END();
4446 }
4447}
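
/* Worked example of the helper semantics (illustrative only, not upstream
 * code): the imm8 selects which of the eight words of the 128-bit result is
 * replaced, so the helper call roughly amounts to:
 */
//  *puDst = *puSrc;                    /* copy the 128-bit source */
//  puDst->au16[bImm & 7] = u16Src;     /* insert the new word     */
//  /* IEM_MC_CLEAR_YREG_128_UP() then zeroes bits 255:128 of the YMM reg. */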
4448
4449
4450/* Opcode VEX.F3.0F 0xc4 - invalid */
4451/* Opcode VEX.F2.0F 0xc4 - invalid */
4452
4453/* Opcode VEX.0F 0xc5 - invalid */
4454
4455
4456/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4457FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4458{
4459 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4461 if (IEM_IS_MODRM_REG_MODE(bRm))
4462 {
4463 /*
4464 * Register, register.
4465 */
4466 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4467 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4468 IEM_MC_BEGIN(3, 1);
4469 IEM_MC_LOCAL(uint16_t, u16Dst);
4470 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
4471 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4472 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4473 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4474 IEM_MC_PREPARE_AVX_USAGE();
4475 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4476 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
4477 pu16Dst, puSrc, bImmArg);
4478 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
4479 IEM_MC_ADVANCE_RIP_AND_FINISH();
4480 IEM_MC_END();
4481 }
4482 /* No memory operand. */
4483 else
4484 return IEMOP_RAISE_INVALID_OPCODE();
4485}
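
/* Illustrative equivalent of the helper call above (not upstream code):
 * vpextrw copies the word selected by imm8, which is then stored zero
 * extended into the 32-bit destination register:
 */
//  u16Dst = puSrc->au16[bImm & 7];     /* imm8 picks the word */
//  /* ...IEM_MC_STORE_GREG_U32() stores it zero extended to 32 bits. */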
4486
4487
4488/* Opcode VEX.F3.0F 0xc5 - invalid */
4489/* Opcode VEX.F2.0F 0xc5 - invalid */
4490
4491
4492#define VSHUFP_X(a_Instr) \
4493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4494 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4495 { \
4496 /* \
4497 * Register, register. \
4498 */ \
4499 if (pVCpu->iem.s.uVexLength) \
4500 { \
4501 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4502 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4503 IEM_MC_BEGIN(4, 3); \
4504 IEM_MC_LOCAL(RTUINT256U, uDst); \
4505 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4506 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4507 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4508 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4509 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4510 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4511 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4512 IEM_MC_PREPARE_AVX_USAGE(); \
4513 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4514 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4515 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4516 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4517 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4518 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4519 IEM_MC_END(); \
4520 } \
4521 else \
4522 { \
4523 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4524 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4525 IEM_MC_BEGIN(4, 0); \
4526 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4527 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4528 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4529 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4530 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4531 IEM_MC_PREPARE_AVX_USAGE(); \
4532 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4533 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4534 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4535 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4536 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4537 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4538 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4539 IEM_MC_END(); \
4540 } \
4541 } \
4542 else \
4543 { \
4544 /* \
4545 * Register, memory. \
4546 */ \
4547 if (pVCpu->iem.s.uVexLength) \
4548 { \
4549 IEM_MC_BEGIN(4, 4); \
4550 IEM_MC_LOCAL(RTUINT256U, uDst); \
4551 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4552 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4554 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4555 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4556 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4558 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4559 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4560 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4561 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4562 IEM_MC_PREPARE_AVX_USAGE(); \
4563 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4564 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4565 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4566 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4567 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4568 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4569 IEM_MC_END(); \
4570 } \
4571 else \
4572 { \
4573 IEM_MC_BEGIN(4, 2); \
4574 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4576 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4577 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4578 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4580 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4581 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4582 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4583 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4584 IEM_MC_PREPARE_AVX_USAGE(); \
4585 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4586 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4587 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4588 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4589 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4590 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4591 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4592 IEM_MC_END(); \
4593 } \
4594 } \
4595 (void)0
4596
4597/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4598FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4599{
4600 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4601 VSHUFP_X(vshufps);
4602}
4603
4604
4605/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4606FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4607{
4608 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4609 VSHUFP_X(vshufpd);
4610}
4611#undef VSHUFP_X
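
/* For reference (added sketch, not upstream documentation): per 128-bit lane
 * the vshufps imm8 selects two dwords from the first source and two from the
 * second, roughly:
 */
//  uDst.au32[0] = uSrc1.au32[ bImm       & 3];
//  uDst.au32[1] = uSrc1.au32[(bImm >> 2) & 3];
//  uDst.au32[2] = uSrc2.au32[(bImm >> 4) & 3];
//  uDst.au32[3] = uSrc2.au32[(bImm >> 6) & 3];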
4612
4613
4614/* Opcode VEX.F3.0F 0xc6 - invalid */
4615/* Opcode VEX.F2.0F 0xc6 - invalid */
4616
4617/* Opcode VEX.0F 0xc7 - invalid */
4618/* Opcode VEX.66.0F 0xc7 - invalid */
4619/* Opcode VEX.F3.0F 0xc7 - invalid */
4620/* Opcode VEX.F2.0F 0xc7 - invalid */
4621
4622/* Opcode VEX.0F 0xc8 - invalid */
4623/* Opcode VEX.0F 0xc9 - invalid */
4624/* Opcode VEX.0F 0xca - invalid */
4625/* Opcode VEX.0F 0xcb - invalid */
4626/* Opcode VEX.0F 0xcc - invalid */
4627/* Opcode VEX.0F 0xcd - invalid */
4628/* Opcode VEX.0F 0xce - invalid */
4629/* Opcode VEX.0F 0xcf - invalid */
4630
4631
4632/* Opcode VEX.0F 0xd0 - invalid */
4633/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4634FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4635/* Opcode VEX.F3.0F 0xd0 - invalid */
4636/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4637FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4638
4639/* Opcode VEX.0F 0xd1 - invalid */
4640/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
4641FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
4642/* Opcode VEX.F3.0F 0xd1 - invalid */
4643/* Opcode VEX.F2.0F 0xd1 - invalid */
4644
4645/* Opcode VEX.0F 0xd2 - invalid */
4646/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4647FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
4648/* Opcode VEX.F3.0F 0xd2 - invalid */
4649/* Opcode VEX.F2.0F 0xd2 - invalid */
4650
4651/* Opcode VEX.0F 0xd3 - invalid */
4652/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4653FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
4654/* Opcode VEX.F3.0F 0xd3 - invalid */
4655/* Opcode VEX.F2.0F 0xd3 - invalid */
4656
4657/* Opcode VEX.0F 0xd4 - invalid */
4658
4659
4660/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4661FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4662{
4663 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4664 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4665 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4666}
4667
4668
4669/* Opcode VEX.F3.0F 0xd4 - invalid */
4670/* Opcode VEX.F2.0F 0xd4 - invalid */
4671
4672/* Opcode VEX.0F 0xd5 - invalid */
4673
4674
4675/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4676FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4677{
4678 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4679 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4680 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4681}
4682
4683
4684/* Opcode VEX.F3.0F 0xd5 - invalid */
4685/* Opcode VEX.F2.0F 0xd5 - invalid */
4686
4687/* Opcode VEX.0F 0xd6 - invalid */
4688
4689/**
4690 * @opcode 0xd6
4691 * @oppfx 0x66
4692 * @opcpuid avx
4693 * @opgroup og_avx_pcksclr_datamove
4694 * @opxcpttype none
4695 * @optest op1=-1 op2=2 -> op1=2
4696 * @optest op1=0 op2=-42 -> op1=-42
4697 */
4698FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4699{
4700 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4702 if (IEM_IS_MODRM_REG_MODE(bRm))
4703 {
4704 /*
4705 * Register, register.
4706 */
4707 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4708 IEM_MC_BEGIN(0, 0);
4709
4710 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4711 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4712
4713 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4714 IEM_GET_MODRM_REG(pVCpu, bRm));
4715 IEM_MC_ADVANCE_RIP_AND_FINISH();
4716 IEM_MC_END();
4717 }
4718 else
4719 {
4720 /*
4721 * Memory, register.
4722 */
4723 IEM_MC_BEGIN(0, 2);
4724 IEM_MC_LOCAL(uint64_t, uSrc);
4725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4726
4727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4728 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4729 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4730 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4731
4732 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4733 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4734
4735 IEM_MC_ADVANCE_RIP_AND_FINISH();
4736 IEM_MC_END();
4737 }
4738}
4739
4740/* Opcode VEX.F3.0F 0xd6 - invalid */
4741/* Opcode VEX.F2.0F 0xd6 - invalid */
4742
4743
4744/* Opcode VEX.0F 0xd7 - invalid */
4745
4746/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4747FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4748{
4749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4750 /* Docs say register only. */
4751 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4752 {
4753 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4754 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
4755 if (pVCpu->iem.s.uVexLength)
4756 {
4757 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4758 IEM_MC_BEGIN(2, 1);
4759 IEM_MC_ARG(uint64_t *, puDst, 0);
4760 IEM_MC_LOCAL(RTUINT256U, uSrc);
4761 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4762 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4763 IEM_MC_PREPARE_AVX_USAGE();
4764 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4765 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4766 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4767 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4768 IEM_MC_ADVANCE_RIP_AND_FINISH();
4769 IEM_MC_END();
4770 }
4771 else
4772 {
4773 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4774 IEM_MC_BEGIN(2, 0);
4775 IEM_MC_ARG(uint64_t *, puDst, 0);
4776 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4777 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4778 IEM_MC_PREPARE_AVX_USAGE();
4779 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4780 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4781 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4782 IEM_MC_ADVANCE_RIP_AND_FINISH();
4783 IEM_MC_END();
4784 }
4785 }
4786 else
4787 return IEMOP_RAISE_INVALID_OPCODE();
4788}
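
/* Scalar sketch of the pmovmskb/vpmovmskb helpers called above (illustrative
 * only): the most significant bit of every source byte is gathered into the
 * low bits of the destination GPR - 16 bits for the XMM form, 32 for YMM:
 */
//  uint64_t fMask = 0;
//  for (unsigned i = 0; i < 16; i++)   /* 32 iterations for the YMM form */
//      fMask |= (uint64_t)(uSrc.au8[i] >> 7) << i;
//  *puDst = fMask;                     /* upper GREG bits end up zero    */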
4789
4790
4791/* Opcode VEX.F3.0F 0xd7 - invalid */
4792/* Opcode VEX.F2.0F 0xd7 - invalid */
4793
4794
4795/* Opcode VEX.0F 0xd8 - invalid */
4796/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
4797FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
4798/* Opcode VEX.F3.0F 0xd8 - invalid */
4799/* Opcode VEX.F2.0F 0xd8 - invalid */
4800
4801/* Opcode VEX.0F 0xd9 - invalid */
4802/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
4803FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
4804/* Opcode VEX.F3.0F 0xd9 - invalid */
4805/* Opcode VEX.F2.0F 0xd9 - invalid */
4806
4807/* Opcode VEX.0F 0xda - invalid */
4808
4809
4810/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
4811FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
4812{
4813 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4814 IEMOPMEDIAF3_INIT_VARS(vpminub);
4815 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4816}
4817
4818
4819/* Opcode VEX.F3.0F 0xda - invalid */
4820/* Opcode VEX.F2.0F 0xda - invalid */
4821
4822/* Opcode VEX.0F 0xdb - invalid */
4823
4824
4825/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
4826FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
4827{
4828 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4829 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4830 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
4831}
4832
4833
4834/* Opcode VEX.F3.0F 0xdb - invalid */
4835/* Opcode VEX.F2.0F 0xdb - invalid */
4836
4837/* Opcode VEX.0F 0xdc - invalid */
4838/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
4839FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
4840/* Opcode VEX.F3.0F 0xdc - invalid */
4841/* Opcode VEX.F2.0F 0xdc - invalid */
4842
4843/* Opcode VEX.0F 0xdd - invalid */
4844/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
4845FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
4846/* Opcode VEX.F3.0F 0xdd - invalid */
4847/* Opcode VEX.F2.0F 0xdd - invalid */
4848
4849/* Opcode VEX.0F 0xde - invalid */
4850
4851
4852/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
4853FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
4854{
4855 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4856 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
4857 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4858}
4859
4860
4861/* Opcode VEX.F3.0F 0xde - invalid */
4862/* Opcode VEX.F2.0F 0xde - invalid */
4863
4864/* Opcode VEX.0F 0xdf - invalid */
4865
4866
4867/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
4868FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
4869{
4870 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4871 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4872 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
4873}
4874
4875
4876/* Opcode VEX.F3.0F 0xdf - invalid */
4877/* Opcode VEX.F2.0F 0xdf - invalid */
4878
4879/* Opcode VEX.0F 0xe0 - invalid */
4880
4881
4882/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
4883FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
4884{
4885 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4886 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
4887 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4888}
4889
4890
4891/* Opcode VEX.F3.0F 0xe0 - invalid */
4892/* Opcode VEX.F2.0F 0xe0 - invalid */
4893
4894/* Opcode VEX.0F 0xe1 - invalid */
4895/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
4896FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
4897/* Opcode VEX.F3.0F 0xe1 - invalid */
4898/* Opcode VEX.F2.0F 0xe1 - invalid */
4899
4900/* Opcode VEX.0F 0xe2 - invalid */
4901/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
4902FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
4903/* Opcode VEX.F3.0F 0xe2 - invalid */
4904/* Opcode VEX.F2.0F 0xe2 - invalid */
4905
4906/* Opcode VEX.0F 0xe3 - invalid */
4907
4908
4909/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
4910FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
4911{
4912 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4913 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
4914 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4915}
4916
4917
4918/* Opcode VEX.F3.0F 0xe3 - invalid */
4919/* Opcode VEX.F2.0F 0xe3 - invalid */
4920
4921/* Opcode VEX.0F 0xe4 - invalid */
4922
4923
4924/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
4925FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
4926{
4927 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4928 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
4929 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4930}
4931
4932
4933/* Opcode VEX.F3.0F 0xe4 - invalid */
4934/* Opcode VEX.F2.0F 0xe4 - invalid */
4935
4936/* Opcode VEX.0F 0xe5 - invalid */
4937
4938
4939/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
4940FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
4941{
4942 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4943 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
4944 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4945}
4946
4947
4948/* Opcode VEX.F3.0F 0xe5 - invalid */
4949/* Opcode VEX.F2.0F 0xe5 - invalid */
4950
4951/* Opcode VEX.0F 0xe6 - invalid */
4952/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
4953FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
4954/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
4955FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
4956/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
4957FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
4958
4959
4960/* Opcode VEX.0F 0xe7 - invalid */
4961
4962/**
4963 * @opcode 0xe7
4964 * @opcodesub !11 mr/reg
4965 * @oppfx 0x66
4966 * @opcpuid avx
4967 * @opgroup og_avx_cachect
4968 * @opxcpttype 1
4969 * @optest op1=-1 op2=2 -> op1=2
4970 * @optest op1=0 op2=-42 -> op1=-42
4971 */
4972FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
4973{
4974 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4975 Assert(pVCpu->iem.s.uVexLength <= 1);
4976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4977 if (IEM_IS_MODRM_MEM_MODE(bRm))
4978 {
4979 if (pVCpu->iem.s.uVexLength == 0)
4980 {
4981 /*
4982 * 128-bit: Memory, register.
4983 */
4984 IEM_MC_BEGIN(0, 2);
4985 IEM_MC_LOCAL(RTUINT128U, uSrc);
4986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4987
4988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4989 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4990 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4991 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4992
4993 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4994 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4995
4996 IEM_MC_ADVANCE_RIP_AND_FINISH();
4997 IEM_MC_END();
4998 }
4999 else
5000 {
5001 /*
5002 * 256-bit: Memory, register.
5003 */
5004 IEM_MC_BEGIN(0, 2);
5005 IEM_MC_LOCAL(RTUINT256U, uSrc);
5006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5007
5008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5009 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5010 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5011 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5012
5013 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5014 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5015
5016 IEM_MC_ADVANCE_RIP_AND_FINISH();
5017 IEM_MC_END();
5018 }
5019 }
5020 /**
5021 * @opdone
5022 * @opmnemonic udvex660fe7reg
5023 * @opcode 0xe7
5024 * @opcodesub 11 mr/reg
5025 * @oppfx 0x66
5026 * @opunused immediate
5027 * @opcpuid avx
5028 * @optest ->
5029 */
5030 else
5031 return IEMOP_RAISE_INVALID_OPCODE();
5032}
5033
5034/* Opcode VEX.F3.0F 0xe7 - invalid */
5035/* Opcode VEX.F2.0F 0xe7 - invalid */
5036
5037
5038/* Opcode VEX.0F 0xe8 - invalid */
5039/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5040FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
5041/* Opcode VEX.F3.0F 0xe8 - invalid */
5042/* Opcode VEX.F2.0F 0xe8 - invalid */
5043
5044/* Opcode VEX.0F 0xe9 - invalid */
5045/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5046FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
5047/* Opcode VEX.F3.0F 0xe9 - invalid */
5048/* Opcode VEX.F2.0F 0xe9 - invalid */
5049
5050/* Opcode VEX.0F 0xea - invalid */
5051
5052
5053/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5054FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5055{
5056 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5057 IEMOPMEDIAF3_INIT_VARS(vpminsw);
5058 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5059}


/* Opcode VEX.F3.0F 0xea - invalid */
/* Opcode VEX.F2.0F 0xea - invalid */

/* Opcode VEX.0F 0xeb - invalid */


/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
}



/* Opcode VEX.F3.0F 0xeb - invalid */
/* Opcode VEX.F2.0F 0xeb - invalid */

/* Opcode VEX.0F 0xec - invalid */
/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xec - invalid */
/* Opcode VEX.F2.0F 0xec - invalid */

/* Opcode VEX.0F 0xed - invalid */
/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xed - invalid */
/* Opcode VEX.F2.0F 0xed - invalid */

/* Opcode VEX.0F 0xee - invalid */


/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0xee - invalid */
/* Opcode VEX.F2.0F 0xee - invalid */


/* Opcode VEX.0F 0xef - invalid */


/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
}


/* Opcode VEX.F3.0F 0xef - invalid */
/* Opcode VEX.F2.0F 0xef - invalid */

/* Opcode VEX.0F 0xf0 - invalid */
/* Opcode VEX.66.0F 0xf0 - invalid */


/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * Register, memory128.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory256.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, u256Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
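/*
 * Note: vlddqu does no alignment checking whatsoever (plain
 *       IEM_MC_FETCH_MEM_U128/U256 above rather than an _ALIGN_ variant);
 *       architecturally the AVX form behaves just like a vmovdqu load.
 */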


/* Opcode VEX.0F 0xf1 - invalid */
/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
/* Opcode VEX.F2.0F 0xf1 - invalid */

/* Opcode VEX.0F 0xf2 - invalid */
/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf2 - invalid */

/* Opcode VEX.0F 0xf3 - invalid */
/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf3 - invalid */

/* Opcode VEX.0F 0xf4 - invalid */


/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
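/*
 * Note: vpmuludq multiplies the unsigned low dword of each qword lane of the
 *       two sources and stores the full 64-bit product in the corresponding
 *       destination lane.
 */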


/* Opcode VEX.F2.0F 0xf4 - invalid */

/* Opcode VEX.0F 0xf5 - invalid */
/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf5 - invalid */

/* Opcode VEX.0F 0xf6 - invalid */


/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
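/*
 * Note: vpsadbw sums the absolute differences of the eight byte pairs in each
 *       qword lane and stores the 16-bit result zero-extended to 64 bits in
 *       the corresponding destination lane.
 */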


/* Opcode VEX.F2.0F 0xf6 - invalid */

/* Opcode VEX.0F 0xf7 - invalid */
/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode VEX.F2.0F 0xf7 - invalid */

/* Opcode VEX.0F 0xf8 - invalid */


/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpsubb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf8 - invalid */

/* Opcode VEX.0F 0xf9 - invalid */


/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf9 - invalid */

/* Opcode VEX.0F 0xfa - invalid */


/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfa - invalid */

/* Opcode VEX.0F 0xfb - invalid */


/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpsubq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfb - invalid */

/* Opcode VEX.0F 0xfc - invalid */


/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpaddb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfc - invalid */

/* Opcode VEX.0F 0xfd - invalid */


/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfd - invalid */

/* Opcode VEX.0F 0xfe - invalid */


/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS( vpaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_vud0)
{
    IEMOP_MNEMONIC(vud0, "vud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
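        /* Intel CPUs consume the ModR/M byte and decode any memory operand
           for UD0 before raising \#UD, which the code below mirrors; other
           vendors fault on the opcode immediately. */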
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/**
 * VEX opcode map \#1.
 *
 * @sa g_apfnTwoByteMap
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
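/* 256 opcodes x 4 prefix variants (none, 066h, 0f3h, 0f2h) = 1024 entries. */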
/** @} */
