VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h @ 100732

Last change on this file since 100732 was 100714, checked in by vboxsync, 21 months ago

VMM/IEM: Require an IEMOP_HLP_DONE_DECODING in all MC blocks so we know exactly when the recompiler starts emitting code (calls) and we can make sure it's still safe to restart instruction decoding. Also made the python script check this and that nothing that smells like decoding happens after IEMOP_HLP_DONE_DECODING and its friends. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 208.4 KB
/* $Id: IEMAllInstructionsVexMap1.cpp.h 100714 2023-07-27 10:12:09Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
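
/*
 * Dispatch sketch for the worker above.  The handler and table names are
 * illustrative only, and IEMOPMEDIAF3_INIT_VARS is assumed to be the
 * PCIEMOPMEDIAF3 counterpart of the IEMOPMEDIAOPTF3_INIT_VARS helper used
 * by the vunpcklps handler further down:
 *
 *     FNIEMOP_DEF(iemOp_vxxx_Vx_Hx_Wx)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VXXX, vxxx, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
 *         IEMOPMEDIAF3_INIT_VARS( vxxx);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */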


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
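
/*
 * Dispatch sketch for the function-table worker above; this is the shape the
 * vunpcklps handler further down uses (through the _LowSrc wrapper):
 *
 *     IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
 *     return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
 *                           IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 */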


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx    xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx    ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx    xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx    ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}
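
/*
 * Note: since we always fetch the whole operand, both the _HighSrc and
 * _LowSrc wrappers above currently forward unchanged to
 * iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt; see the vunpcklps/vunpckhps handlers
 * further down, which dispatch through _LowSrc and _HighSrc respectively.
 */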


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1/mem128
 *     - vpxxx    ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(2, 2);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(2, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
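
/*
 * Dispatch sketch for the two-operand worker above.  The handler name and
 * the IEMOPMEDIAOPTF2_INIT_VARS table helper are assumptions here, mirroring
 * the three-operand IEMOPMEDIAOPTF3_INIT_VARS pattern used elsewhere in this
 * file:
 *
 *     FNIEMOP_DEF(iemOp_vxxx_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(VEX_RM, VXXX, vxxx, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
 *         IEMOPMEDIAOPTF2_INIT_VARS( vxxx);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */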


/*  Opcode VEX.0F 0x00 - invalid */
/*  Opcode VEX.0F 0x01 - invalid */
/*  Opcode VEX.0F 0x02 - invalid */
/*  Opcode VEX.0F 0x03 - invalid */
/*  Opcode VEX.0F 0x04 - invalid */
/*  Opcode VEX.0F 0x05 - invalid */
/*  Opcode VEX.0F 0x06 - invalid */
/*  Opcode VEX.0F 0x07 - invalid */
/*  Opcode VEX.0F 0x08 - invalid */
/*  Opcode VEX.0F 0x09 - invalid */
/*  Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

/*  Opcode VEX.0F 0x0c - invalid */
/*  Opcode VEX.0F 0x0d - invalid */
/*  Opcode VEX.0F 0x0e - invalid */
/*  Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
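
/*
 * Register-form merge semantics in numbers, using the second @optest above:
 * with Hss = 0 and Uss = -22 the destination takes bits 31:0 from Uss
 * (0xffffffea), keeps bits 127:32 from Hss (zero) and zero-extends
 * bits 255:128, giving op1 = 0xffffffea.
 */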


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Wss_Hss_Vss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         * @optest op2=0x2200220122022203
         *         op3=0x3304330533063307
         *         -> op1=0x22002201220222033304330533063307
         * @optest op2=-1 op3=-42 -> op1=-42
         * @note op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest op1=1 op2=0 op3=0 -> op1=0
         * @optest op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
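
/*
 * Worked register-form example, taken from the first @optest above: with
 * op2.hi = 0x2200220122022203 and op3.hi = 0x3304330533063307 the
 * destination receives op3's high qword in bits 63:0 and op2's high qword
 * in bits 127:64, i.e. op1 = 0x22002201220222033304330533063307, while
 * bits 255:128 are zeroed (VLMAX zero extension).
 */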


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 * @optest op2=0 op3=2 -> op1=2
 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *         -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *         -> op1=0x00000002000000020000000100000001
 * @optest vex.l==1 /
 *         op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *         -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
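
/*
 * Element picture for vmovsldup: with source dwords [s3 s2 s1 s0] the
 * destination becomes [s2 s2 s0 s0] per 128-bit lane, matching the
 * 0/0/2/2 source indices in the stores above.
 */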


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *         -> op1=0x22222222111111112222222211111111
 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *         -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(1, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG(uint64_t, uSrc, 0);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(1, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(uint64_t, uSrc, 0);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */


/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x15 - invalid */
/* Opcode VEX.F2.0F 0x15 - invalid */


FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
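
/*
 * Register-form picture for vmovlhps: the destination keeps the low qword
 * of the VVVV operand in bits 63:0 and receives the low qword of the ModRM
 * register in bits 127:64, with bits 255:128 zeroed; the memory form
 * (vmovhps) instead loads bits 127:64 straight from the qword at [mem].
 */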


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 */
FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 */
FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
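
/*
 * Element picture for vmovshdup: with source dwords [s3 s2 s1 s0] the
 * destination becomes [s3 s3 s1 s1] per 128-bit lane, matching the
 * 1/1/3/3 source indices in the stores above.
 */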
1649
1650
1651/* Opcode VEX.F2.0F 0x16 - invalid */
1652
1653
1654/**
1655 * @opcode 0x17
1656 * @opcodesub !11 mr/reg
1657 * @oppfx none
1658 * @opcpuid avx
1659 * @opgroup og_avx_simdfp_datamove
1660 * @opxcpttype 5
1661 */
1662FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1663{
1664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1665 if (IEM_IS_MODRM_MEM_MODE(bRm))
1666 {
1667 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1668
1669 IEM_MC_BEGIN(0, 2);
1670 IEM_MC_LOCAL(uint64_t, uSrc);
1671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1672
1673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1674 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1675 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1676 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1677
1678 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1679 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1680
1681 IEM_MC_ADVANCE_RIP_AND_FINISH();
1682 IEM_MC_END();
1683 }
1684
1685 /**
1686 * @opdone
1687 * @opmnemonic udvex0f17m3
1688 * @opcode 0x17
1689 * @opcodesub 11 mr/reg
1690 * @oppfx none
1691 * @opunused immediate
1692 * @opcpuid avx
1693 * @optest ->
1694 */
1695 else
1696 IEMOP_RAISE_INVALID_OPCODE_RET();
1697}
1698
1699
1700/**
1701 * @opcode 0x17
1702 * @opcodesub !11 mr/reg
1703 * @oppfx 0x66
1704 * @opcpuid avx
1705 * @opgroup og_avx_pcksclr_datamove
1706 * @opxcpttype 5
1707 */
1708FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1709{
1710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1711 if (IEM_IS_MODRM_MEM_MODE(bRm))
1712 {
1713 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1714 IEM_MC_BEGIN(0, 2);
1715 IEM_MC_LOCAL(uint64_t, uSrc);
1716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1717
1718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1719 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1720 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1721 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1722
1723 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1724 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1725
1726 IEM_MC_ADVANCE_RIP_AND_FINISH();
1727 IEM_MC_END();
1728 }
1729
1730 /**
1731 * @opdone
1732 * @opmnemonic udvex660f17m3
1733 * @opcode 0x17
1734 * @opcodesub 11 mr/reg
1735 * @oppfx 0x66
1736 * @opunused immediate
1737 * @opcpuid avx
1738 * @optest ->
1739 */
1740 else
1741 IEMOP_RAISE_INVALID_OPCODE_RET();
1742}
1743
1744
1745/* Opcode VEX.F3.0F 0x17 - invalid */
1746/* Opcode VEX.F2.0F 0x17 - invalid */
1747
1748
1749/* Opcode VEX.0F 0x18 - invalid */
1750/* Opcode VEX.0F 0x19 - invalid */
1751/* Opcode VEX.0F 0x1a - invalid */
1752/* Opcode VEX.0F 0x1b - invalid */
1753/* Opcode VEX.0F 0x1c - invalid */
1754/* Opcode VEX.0F 0x1d - invalid */
1755/* Opcode VEX.0F 0x1e - invalid */
1756/* Opcode VEX.0F 0x1f - invalid */
1757
1758/* Opcode VEX.0F 0x20 - invalid */
1759/* Opcode VEX.0F 0x21 - invalid */
1760/* Opcode VEX.0F 0x22 - invalid */
1761/* Opcode VEX.0F 0x23 - invalid */
1762/* Opcode VEX.0F 0x24 - invalid */
1763/* Opcode VEX.0F 0x25 - invalid */
1764/* Opcode VEX.0F 0x26 - invalid */
1765/* Opcode VEX.0F 0x27 - invalid */
1766
1767/**
1768 * @opcode 0x28
1769 * @oppfx none
1770 * @opcpuid avx
1771 * @opgroup og_avx_pcksclr_datamove
1772 * @opxcpttype 1
1773 * @optest op1=1 op2=2 -> op1=2
1774 * @optest op1=0 op2=-42 -> op1=-42
1775 * @note Almost identical to vmovapd.
1776 */
1777FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1778{
1779 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1781 Assert(pVCpu->iem.s.uVexLength <= 1);
1782 if (IEM_IS_MODRM_REG_MODE(bRm))
1783 {
1784 /*
1785 * Register, register.
1786 */
1787 IEM_MC_BEGIN(1, 0);
1788 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1789
1790 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1791 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1792 if (pVCpu->iem.s.uVexLength == 0)
1793 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1794 IEM_GET_MODRM_RM(pVCpu, bRm));
1795 else
1796 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1797 IEM_GET_MODRM_RM(pVCpu, bRm));
1798 IEM_MC_ADVANCE_RIP_AND_FINISH();
1799 IEM_MC_END();
1800 }
1801 else
1802 {
1803 /*
1804 * Register, memory.
1805 */
1806 if (pVCpu->iem.s.uVexLength == 0)
1807 {
1808 IEM_MC_BEGIN(0, 2);
1809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1810 IEM_MC_LOCAL(RTUINT128U, uSrc);
1811
1812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1813 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1814 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1815 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1816
1817 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1818 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1819
1820 IEM_MC_ADVANCE_RIP_AND_FINISH();
1821 IEM_MC_END();
1822 }
1823 else
1824 {
1825 IEM_MC_BEGIN(0, 2);
1826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1827 IEM_MC_LOCAL(RTUINT256U, uSrc);
1828
1829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1830 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1831 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1832 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1833
1834 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1835 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1836
1837 IEM_MC_ADVANCE_RIP_AND_FINISH();
1838 IEM_MC_END();
1839 }
1840 }
1841}
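/*
 * Note: the *_ZX_VLMAX micro-ops above implement the VEX zeroing rule: the
 * 128-bit forms clear the destination register from bit 128 up to VLMAX,
 * i.e. roughly uDst.au64[2] = uDst.au64[3] = 0 on a 256-bit implementation.
 */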
1842
1843
1844/**
1845 * @opcode 0x28
1846 * @oppfx 66
1847 * @opcpuid avx
1848 * @opgroup og_avx_pcksclr_datamove
1849 * @opxcpttype 1
1850 * @optest op1=1 op2=2 -> op1=2
1851 * @optest op1=0 op2=-42 -> op1=-42
1852 * @note Almost identical to vmovaps
1853 */
1854FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1855{
1856 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1858 Assert(pVCpu->iem.s.uVexLength <= 1);
1859 if (IEM_IS_MODRM_REG_MODE(bRm))
1860 {
1861 /*
1862 * Register, register.
1863 */
1864 IEM_MC_BEGIN(1, 0);
1865 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1866
1867 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1868 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1869 if (pVCpu->iem.s.uVexLength == 0)
1870 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1871 IEM_GET_MODRM_RM(pVCpu, bRm));
1872 else
1873 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1874 IEM_GET_MODRM_RM(pVCpu, bRm));
1875 IEM_MC_ADVANCE_RIP_AND_FINISH();
1876 IEM_MC_END();
1877 }
1878 else
1879 {
1880 /*
1881 * Register, memory.
1882 */
1883 if (pVCpu->iem.s.uVexLength == 0)
1884 {
1885 IEM_MC_BEGIN(0, 2);
1886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1887 IEM_MC_LOCAL(RTUINT128U, uSrc);
1888
1889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1890 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1891 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1892 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1893
1894 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1895 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1896
1897 IEM_MC_ADVANCE_RIP_AND_FINISH();
1898 IEM_MC_END();
1899 }
1900 else
1901 {
1902 IEM_MC_BEGIN(0, 2);
1903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1904 IEM_MC_LOCAL(RTUINT256U, uSrc);
1905
1906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1907 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1908 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1909 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1910
1911 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1912 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1913
1914 IEM_MC_ADVANCE_RIP_AND_FINISH();
1915 IEM_MC_END();
1916 }
1917 }
1918}
1919
1920/**
1921 * @opmnemonic udvexf30f28
1922 * @opcode 0x28
1923 * @oppfx 0xf3
1924 * @opunused vex.modrm
1925 * @opcpuid avx
1926 * @optest ->
1927 * @opdone
1928 */
1929
1930/**
1931 * @opmnemonic udvexf20f28
1932 * @opcode 0x28
1933 * @oppfx 0xf2
1934 * @opunused vex.modrm
1935 * @opcpuid avx
1936 * @optest ->
1937 * @opdone
1938 */
1939
1940/**
1941 * @opcode 0x29
1942 * @oppfx none
1943 * @opcpuid avx
1944 * @opgroup og_avx_pcksclr_datamove
1945 * @opxcpttype 1
1946 * @optest op1=1 op2=2 -> op1=2
1947 * @optest op1=0 op2=-42 -> op1=-42
1948 * @note Almost identical to vmovapd.
1949 */
1950FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1951{
1952 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1954 Assert(pVCpu->iem.s.uVexLength <= 1);
1955 if (IEM_IS_MODRM_REG_MODE(bRm))
1956 {
1957 /*
1958 * Register, register.
1959 */
1960 IEM_MC_BEGIN(1, 0);
1961 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1962
1963 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1964 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1965 if (pVCpu->iem.s.uVexLength == 0)
1966 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1967 IEM_GET_MODRM_REG(pVCpu, bRm));
1968 else
1969 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1970 IEM_GET_MODRM_REG(pVCpu, bRm));
1971 IEM_MC_ADVANCE_RIP_AND_FINISH();
1972 IEM_MC_END();
1973 }
1974 else
1975 {
1976 /*
1977 * Register, memory.
1978 */
1979 if (pVCpu->iem.s.uVexLength == 0)
1980 {
1981 IEM_MC_BEGIN(0, 2);
1982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1983 IEM_MC_LOCAL(RTUINT128U, uSrc);
1984
1985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1986 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1988 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1989
1990 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1991 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1992
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 }
1996 else
1997 {
1998 IEM_MC_BEGIN(0, 2);
1999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2000 IEM_MC_LOCAL(RTUINT256U, uSrc);
2001
2002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2003 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2004 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2005 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2006
2007 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2008 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2009
2010 IEM_MC_ADVANCE_RIP_AND_FINISH();
2011 IEM_MC_END();
2012 }
2013 }
2014}
2015
2016/**
2017 * @opcode 0x29
2018 * @oppfx 66
2019 * @opcpuid avx
2020 * @opgroup og_avx_pcksclr_datamove
2021 * @opxcpttype 1
2022 * @optest op1=1 op2=2 -> op1=2
2023 * @optest op1=0 op2=-42 -> op1=-42
2024 * @note Almost identical to vmovaps
2025 */
2026FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2027{
2028 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2029 Assert(pVCpu->iem.s.uVexLength <= 1);
2030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2031 if (IEM_IS_MODRM_REG_MODE(bRm))
2032 {
2033 /*
2034 * Register, register.
2035 */
2036 IEM_MC_BEGIN(1, 0);
2037 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2038
2039 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2040 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2041 if (pVCpu->iem.s.uVexLength == 0)
2042 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2043 IEM_GET_MODRM_REG(pVCpu, bRm));
2044 else
2045 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2046 IEM_GET_MODRM_REG(pVCpu, bRm));
2047 IEM_MC_ADVANCE_RIP_AND_FINISH();
2048 IEM_MC_END();
2049 }
2050 else
2051 {
2052 /*
2053 * Register, memory.
2054 */
2055 if (pVCpu->iem.s.uVexLength == 0)
2056 {
2057 IEM_MC_BEGIN(0, 2);
2058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2059 IEM_MC_LOCAL(RTUINT128U, uSrc);
2060
2061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2062 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2063 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2064 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2065
2066 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2067 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2068
2069 IEM_MC_ADVANCE_RIP_AND_FINISH();
2070 IEM_MC_END();
2071 }
2072 else
2073 {
2074 IEM_MC_BEGIN(0, 2);
2075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2076 IEM_MC_LOCAL(RTUINT256U, uSrc);
2077
2078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2079 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2080 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2081 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2082
2083 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2084 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2085
2086 IEM_MC_ADVANCE_RIP_AND_FINISH();
2087 IEM_MC_END();
2088 }
2089 }
2090}
2091
2092
2093/**
2094 * @opmnemonic udvexf30f29
2095 * @opcode 0x29
2096 * @oppfx 0xf3
2097 * @opunused vex.modrm
2098 * @opcpuid avx
2099 * @optest ->
2100 * @opdone
2101 */
2102
2103/**
2104 * @opmnemonic udvexf20f29
2105 * @opcode 0x29
2106 * @oppfx 0xf2
2107 * @opunused vex.modrm
2108 * @opcpuid avx
2109 * @optest ->
2110 * @opdone
2111 */
2112
2113
2114/* Opcode VEX.0F 0x2a - invalid */
2115/* Opcode VEX.66.0F 0x2a - invalid */
2116/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2117FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2118/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2119FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2120
2121
2122/**
2123 * @opcode 0x2b
2124 * @opcodesub !11 mr/reg
2125 * @oppfx none
2126 * @opcpuid avx
2127 * @opgroup og_avx_cachect
2128 * @opxcpttype 1
2129 * @optest op1=1 op2=2 -> op1=2
2130 * @optest op1=0 op2=-42 -> op1=-42
2131 * @note Identical implementation to vmovntpd
2132 */
2133FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2134{
2135 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2136 Assert(pVCpu->iem.s.uVexLength <= 1);
2137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2138 if (IEM_IS_MODRM_MEM_MODE(bRm))
2139 {
2140 /*
2141 * Memory, register.
2142 */
2143 if (pVCpu->iem.s.uVexLength == 0)
2144 {
2145 IEM_MC_BEGIN(0, 2);
2146 IEM_MC_LOCAL(RTUINT128U, uSrc);
2147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2148
2149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2150 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2151 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2152 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2153
2154 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2155 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2156
2157 IEM_MC_ADVANCE_RIP_AND_FINISH();
2158 IEM_MC_END();
2159 }
2160 else
2161 {
2162 IEM_MC_BEGIN(0, 2);
2163 IEM_MC_LOCAL(RTUINT256U, uSrc);
2164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2165
2166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2167 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2168 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2169 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2170
2171 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2172 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2173
2174 IEM_MC_ADVANCE_RIP_AND_FINISH();
2175 IEM_MC_END();
2176 }
2177 }
2178 /* The register, register encoding is invalid. */
2179 else
2180 IEMOP_RAISE_INVALID_OPCODE_RET();
2181}
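/*
 * Note: the non-temporal stores above and below only exist in the memory
 * form (mod=11 raises #UD) and keep the alignment checking stores
 * (*_ALIGN_SSE/_ALIGN_AVX), matching the architectural #GP on misaligned
 * vmovntps/vmovntpd operands.
 */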
2182
2183/**
2184 * @opcode 0x2b
2185 * @opcodesub !11 mr/reg
2186 * @oppfx 0x66
2187 * @opcpuid avx
2188 * @opgroup og_avx_cachect
2189 * @opxcpttype 1
2190 * @optest op1=1 op2=2 -> op1=2
2191 * @optest op1=0 op2=-42 -> op1=-42
2192 * @note Identical implementation to vmovntps
2193 */
2194FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2195{
2196 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2197 Assert(pVCpu->iem.s.uVexLength <= 1);
2198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2199 if (IEM_IS_MODRM_MEM_MODE(bRm))
2200 {
2201 /*
2202 * Memory, register.
2203 */
2204 if (pVCpu->iem.s.uVexLength == 0)
2205 {
2206 IEM_MC_BEGIN(0, 2);
2207 IEM_MC_LOCAL(RTUINT128U, uSrc);
2208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2209
2210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2211 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2212 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2213 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2214
2215 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2216 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2217
2218 IEM_MC_ADVANCE_RIP_AND_FINISH();
2219 IEM_MC_END();
2220 }
2221 else
2222 {
2223 IEM_MC_BEGIN(0, 2);
2224 IEM_MC_LOCAL(RTUINT256U, uSrc);
2225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2226
2227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2228 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2229 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2231
2232 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2233 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2234
2235 IEM_MC_ADVANCE_RIP_AND_FINISH();
2236 IEM_MC_END();
2237 }
2238 }
2239 /* The register, register encoding is invalid. */
2240 else
2241 IEMOP_RAISE_INVALID_OPCODE_RET();
2242}
2243
2244/**
2245 * @opmnemonic udvexf30f2b
2246 * @opcode 0x2b
2247 * @oppfx 0xf3
2248 * @opunused vex.modrm
2249 * @opcpuid avx
2250 * @optest ->
2251 * @opdone
2252 */
2253
2254/**
2255 * @opmnemonic udvexf20f2b
2256 * @opcode 0x2b
2257 * @oppfx 0xf2
2258 * @opunused vex.modrm
2259 * @opcpuid avx
2260 * @optest ->
2261 * @opdone
2262 */
2263
2264
2265/* Opcode VEX.0F 0x2c - invalid */
2266/* Opcode VEX.66.0F 0x2c - invalid */
2267/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2268FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2269/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2270FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2271
2272/* Opcode VEX.0F 0x2d - invalid */
2273/* Opcode VEX.66.0F 0x2d - invalid */
2274/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2275FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2276/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2277FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2278
2279
2280/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
2281FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2282{
2283 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2284 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2285 if (IEM_IS_MODRM_REG_MODE(bRm))
2286 {
2287 /*
2288 * Register, register.
2289 */
2290 IEM_MC_BEGIN(4, 1);
2291 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2292 IEM_MC_LOCAL(uint32_t, fEFlags);
2293 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2294 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2295 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2296 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2297 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2298 IEM_MC_PREPARE_AVX_USAGE();
2299 IEM_MC_FETCH_EFLAGS(fEFlags);
2300 IEM_MC_REF_MXCSR(pfMxcsr);
2301 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2302 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2303 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2304 pfMxcsr, pEFlags, puSrc1, puSrc2);
2305 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2306 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2307 } IEM_MC_ELSE() {
2308 IEM_MC_COMMIT_EFLAGS(fEFlags);
2309 } IEM_MC_ENDIF();
2310
2311 IEM_MC_ADVANCE_RIP_AND_FINISH();
2312 IEM_MC_END();
2313 }
2314 else
2315 {
2316 /*
2317 * Register, memory.
2318 */
2319 IEM_MC_BEGIN(4, 3);
2320 IEM_MC_LOCAL(uint32_t, fEFlags);
2321 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2322 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2323 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2324 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2325 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2327
2328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2329 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2331 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2332
2333 IEM_MC_PREPARE_AVX_USAGE();
2334 IEM_MC_FETCH_EFLAGS(fEFlags);
2335 IEM_MC_REF_MXCSR(pfMxcsr);
2336 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2337 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2338 pfMxcsr, pEFlags, puSrc1, puSrc2);
2339 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2340 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2341 } IEM_MC_ELSE() {
2342 IEM_MC_COMMIT_EFLAGS(fEFlags);
2343 } IEM_MC_ENDIF();
2344
2345 IEM_MC_ADVANCE_RIP_AND_FINISH();
2346 IEM_MC_END();
2347 }
2348}
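/*
 * Note on the pattern above: the assembly helper returns both the compare
 * result in EFLAGS and any exception flags in MXCSR.  EFLAGS is only
 * committed when no unmasked SIMD FP exception became pending; otherwise
 * IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT raises #XM, or #UD when the OS
 * hasn't enabled SIMD FP exception reporting.
 */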
2349
2350
2351/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
2352FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2353{
2354 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2356 if (IEM_IS_MODRM_REG_MODE(bRm))
2357 {
2358 /*
2359 * Register, register.
2360 */
2361 IEM_MC_BEGIN(4, 1);
2362 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2363 IEM_MC_LOCAL(uint32_t, fEFlags);
2364 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2365 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2366 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2367 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2368 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2369 IEM_MC_PREPARE_AVX_USAGE();
2370 IEM_MC_FETCH_EFLAGS(fEFlags);
2371 IEM_MC_REF_MXCSR(pfMxcsr);
2372 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2373 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2374 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2375 pfMxcsr, pEFlags, puSrc1, puSrc2);
2376 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2377 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2378 } IEM_MC_ELSE() {
2379 IEM_MC_COMMIT_EFLAGS(fEFlags);
2380 } IEM_MC_ENDIF();
2381
2382 IEM_MC_ADVANCE_RIP_AND_FINISH();
2383 IEM_MC_END();
2384 }
2385 else
2386 {
2387 /*
2388 * Register, memory.
2389 */
2390 IEM_MC_BEGIN(4, 3);
2391 IEM_MC_LOCAL(uint32_t, fEFlags);
2392 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2393 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2394 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2395 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2396 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2398
2399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2400 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2401 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2402 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2403
2404 IEM_MC_PREPARE_AVX_USAGE();
2405 IEM_MC_FETCH_EFLAGS(fEFlags);
2406 IEM_MC_REF_MXCSR(pfMxcsr);
2407 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2408 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2409 pfMxcsr, pEFlags, puSrc1, puSrc2);
2410 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2411 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2412 } IEM_MC_ELSE() {
2413 IEM_MC_COMMIT_EFLAGS(fEFlags);
2414 } IEM_MC_ENDIF();
2415
2416 IEM_MC_ADVANCE_RIP_AND_FINISH();
2417 IEM_MC_END();
2418 }
2419}
2420
2421
2422/* Opcode VEX.F3.0F 0x2e - invalid */
2423/* Opcode VEX.F2.0F 0x2e - invalid */
2424
2425/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
2426FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2427{
2428 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2429 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2430 if (IEM_IS_MODRM_REG_MODE(bRm))
2431 {
2432 /*
2433 * Register, register.
2434 */
2435 IEM_MC_BEGIN(4, 1);
2436 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2437 IEM_MC_LOCAL(uint32_t, fEFlags);
2438 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2439 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2440 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2441 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2442 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2443 IEM_MC_PREPARE_AVX_USAGE();
2444 IEM_MC_FETCH_EFLAGS(fEFlags);
2445 IEM_MC_REF_MXCSR(pfMxcsr);
2446 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2447 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2448 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2449 pfMxcsr, pEFlags, puSrc1, puSrc2);
2450 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2451 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2452 } IEM_MC_ELSE() {
2453 IEM_MC_COMMIT_EFLAGS(fEFlags);
2454 } IEM_MC_ENDIF();
2455
2456 IEM_MC_ADVANCE_RIP_AND_FINISH();
2457 IEM_MC_END();
2458 }
2459 else
2460 {
2461 /*
2462 * Register, memory.
2463 */
2464 IEM_MC_BEGIN(4, 3);
2465 IEM_MC_LOCAL(uint32_t, fEFlags);
2466 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2467 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2468 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2469 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2470 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2472
2473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2474 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2475 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2476 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2477
2478 IEM_MC_PREPARE_AVX_USAGE();
2479 IEM_MC_FETCH_EFLAGS(fEFlags);
2480 IEM_MC_REF_MXCSR(pfMxcsr);
2481 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2482 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2483 pfMxcsr, pEFlags, puSrc1, puSrc2);
2484 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2485 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2486 } IEM_MC_ELSE() {
2487 IEM_MC_COMMIT_EFLAGS(fEFlags);
2488 } IEM_MC_ENDIF();
2489
2490 IEM_MC_ADVANCE_RIP_AND_FINISH();
2491 IEM_MC_END();
2492 }
2493}
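/*
 * Note: vcomiss (above) differs from vucomiss only in NaN handling: the
 * ordered compare signals #IA for QNaN operands as well, while the
 * unordered variants only do so for SNaNs; the ZF/PF/CF result mapping is
 * identical.
 */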
2494
2495
2496/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
2497FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2498{
2499 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2501 if (IEM_IS_MODRM_REG_MODE(bRm))
2502 {
2503 /*
2504 * Register, register.
2505 */
2506 IEM_MC_BEGIN(4, 1);
2507 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2508 IEM_MC_LOCAL(uint32_t, fEFlags);
2509 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2510 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2511 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2512 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2513 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2514 IEM_MC_PREPARE_AVX_USAGE();
2515 IEM_MC_FETCH_EFLAGS(fEFlags);
2516 IEM_MC_REF_MXCSR(pfMxcsr);
2517 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2518 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2519 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2520 pfMxcsr, pEFlags, puSrc1, puSrc2);
2521 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2522 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2523 } IEM_MC_ELSE() {
2524 IEM_MC_COMMIT_EFLAGS(fEFlags);
2525 } IEM_MC_ENDIF();
2526
2527 IEM_MC_ADVANCE_RIP_AND_FINISH();
2528 IEM_MC_END();
2529 }
2530 else
2531 {
2532 /*
2533 * Register, memory.
2534 */
2535 IEM_MC_BEGIN(4, 3);
2536 IEM_MC_LOCAL(uint32_t, fEFlags);
2537 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2538 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2539 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2540 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2541 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2543
2544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2545 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2546 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2547 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2548
2549 IEM_MC_PREPARE_AVX_USAGE();
2550 IEM_MC_FETCH_EFLAGS(fEFlags);
2551 IEM_MC_REF_MXCSR(pfMxcsr);
2552 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2553 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2554 pfMxcsr, pEFlags, puSrc1, puSrc2);
2555 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2556 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2557 } IEM_MC_ELSE() {
2558 IEM_MC_COMMIT_EFLAGS(fEFlags);
2559 } IEM_MC_ENDIF();
2560
2561 IEM_MC_ADVANCE_RIP_AND_FINISH();
2562 IEM_MC_END();
2563 }
2564}
2565
2566
2567/* Opcode VEX.F3.0F 0x2f - invalid */
2568/* Opcode VEX.F2.0F 0x2f - invalid */
2569
2570/* Opcode VEX.0F 0x30 - invalid */
2571/* Opcode VEX.0F 0x31 - invalid */
2572/* Opcode VEX.0F 0x32 - invalid */
2573/* Opcode VEX.0F 0x33 - invalid */
2574/* Opcode VEX.0F 0x34 - invalid */
2575/* Opcode VEX.0F 0x35 - invalid */
2576/* Opcode VEX.0F 0x36 - invalid */
2577/* Opcode VEX.0F 0x37 - invalid */
2578/* Opcode VEX.0F 0x38 - invalid */
2579/* Opcode VEX.0F 0x39 - invalid */
2580/* Opcode VEX.0F 0x3a - invalid */
2581/* Opcode VEX.0F 0x3b - invalid */
2582/* Opcode VEX.0F 0x3c - invalid */
2583/* Opcode VEX.0F 0x3d - invalid */
2584/* Opcode VEX.0F 0x3e - invalid */
2585/* Opcode VEX.0F 0x3f - invalid */
2586/* Opcode VEX.0F 0x40 - invalid */
2587/* Opcode VEX.0F 0x41 - invalid */
2588/* Opcode VEX.0F 0x42 - invalid */
2589/* Opcode VEX.0F 0x43 - invalid */
2590/* Opcode VEX.0F 0x44 - invalid */
2591/* Opcode VEX.0F 0x45 - invalid */
2592/* Opcode VEX.0F 0x46 - invalid */
2593/* Opcode VEX.0F 0x47 - invalid */
2594/* Opcode VEX.0F 0x48 - invalid */
2595/* Opcode VEX.0F 0x49 - invalid */
2596/* Opcode VEX.0F 0x4a - invalid */
2597/* Opcode VEX.0F 0x4b - invalid */
2598/* Opcode VEX.0F 0x4c - invalid */
2599/* Opcode VEX.0F 0x4d - invalid */
2600/* Opcode VEX.0F 0x4e - invalid */
2601/* Opcode VEX.0F 0x4f - invalid */
2602
2603
2604/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2605FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2606{
2607 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2609 if (IEM_IS_MODRM_REG_MODE(bRm))
2610 {
2611 /*
2612 * Register, register.
2613 */
2614 if (pVCpu->iem.s.uVexLength == 0)
2615 {
2616 IEM_MC_BEGIN(2, 1);
2617 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2618 IEM_MC_LOCAL(uint8_t, u8Dst);
2619 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2620 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2621 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2622 IEM_MC_PREPARE_AVX_USAGE();
2623 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2624 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2625 pu8Dst, puSrc);
2626 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2627 IEM_MC_ADVANCE_RIP_AND_FINISH();
2628 IEM_MC_END();
2629 }
2630 else
2631 {
2632 IEM_MC_BEGIN(2, 2);
2633 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2634 IEM_MC_LOCAL(uint8_t, u8Dst);
2635 IEM_MC_LOCAL(RTUINT256U, uSrc);
2636 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2637 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2638
2639 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2640 IEM_MC_PREPARE_AVX_USAGE();
2641 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2642 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2643 pu8Dst, puSrc);
2644 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2645 IEM_MC_ADVANCE_RIP_AND_FINISH();
2646 IEM_MC_END();
2647 }
2648 }
2649 /* No memory operand. */
2650 else
2651 IEMOP_RAISE_INVALID_OPCODE_RET();
2652}
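/*
 * Illustrative sketch (not the actual helper) of what the vmovmskps worker
 * computes for the 128-bit form - one sign bit per 32-bit lane:
 *     uint8_t bDst = 0;
 *     for (unsigned i = 0; i < RT_ELEMENTS(puSrc->au32); i++)
 *         bDst |= (puSrc->au32[i] >> 31) << i;
 */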
2653
2654
2655/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2656FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2657{
2658 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2660 if (IEM_IS_MODRM_REG_MODE(bRm))
2661 {
2662 /*
2663 * Register, register.
2664 */
2665 if (pVCpu->iem.s.uVexLength == 0)
2666 {
2667 IEM_MC_BEGIN(2, 1);
2668 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2669 IEM_MC_LOCAL(uint8_t, u8Dst);
2670 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2671 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2672 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2673 IEM_MC_PREPARE_AVX_USAGE();
2674 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2675 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2676 pu8Dst, puSrc);
2677 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2678 IEM_MC_ADVANCE_RIP_AND_FINISH();
2679 IEM_MC_END();
2680 }
2681 else
2682 {
2683 IEM_MC_BEGIN(2, 2);
2684 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2685 IEM_MC_LOCAL(uint8_t, u8Dst);
2686 IEM_MC_LOCAL(RTUINT256U, uSrc);
2687 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2688 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2689
2690 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2691 IEM_MC_PREPARE_AVX_USAGE();
2692 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2693 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2694 pu8Dst, puSrc);
2695 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2696 IEM_MC_ADVANCE_RIP_AND_FINISH();
2697 IEM_MC_END();
2698 }
2699 }
2700 /* No memory operand. */
2701 else
2702 IEMOP_RAISE_INVALID_OPCODE_RET();
2703}
2704
2705
2706/* Opcode VEX.F3.0F 0x50 - invalid */
2707/* Opcode VEX.F2.0F 0x50 - invalid */
2708
2709/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2710FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2711/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2712FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2713/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2714FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2715/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2716FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2717
2718/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2719FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2720/* Opcode VEX.66.0F 0x52 - invalid */
2721/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2722FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2723/* Opcode VEX.F2.0F 0x52 - invalid */
2724
2725/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2726FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2727/* Opcode VEX.66.0F 0x53 - invalid */
2728/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2729FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2730/* Opcode VEX.F2.0F 0x53 - invalid */
2731
2732
2733/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2734FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2735{
2736 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2737 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2738 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2739}
2740
2741
2742/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2743FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2744{
2745 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2746 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2747 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2748}
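/*
 * Note: the packed single/double logical instructions here deliberately
 * reuse the integer vpand/vpandn/vpor/vpxor workers, since bitwise logic
 * is lane and type agnostic; only the mnemonics and decoding attributes
 * differ.
 */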
2749
2750
2751/* Opcode VEX.F3.0F 0x54 - invalid */
2752/* Opcode VEX.F2.0F 0x54 - invalid */
2753
2754
2755/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2756FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2757{
2758 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2759 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2760 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2761}
2762
2763
2764/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2765FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2766{
2767 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2768 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2769 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2770}
2771
2772
2773/* Opcode VEX.F3.0F 0x55 - invalid */
2774/* Opcode VEX.F2.0F 0x55 - invalid */
2775
2776/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2777FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2778{
2779 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2780 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2781 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2782}
2783
2784
2785/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2786FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2787{
2788 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2789 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2790 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2791}
2792
2793
2794/* Opcode VEX.F3.0F 0x56 - invalid */
2795/* Opcode VEX.F2.0F 0x56 - invalid */
2796
2797
2798/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2799FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2800{
2801 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2802 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2803 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2804}
2805
2806
2807/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2808FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2809{
2810 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2811 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2812 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2813}
2814
2815
2816/* Opcode VEX.F3.0F 0x57 - invalid */
2817/* Opcode VEX.F2.0F 0x57 - invalid */
2818
2819/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2820FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2821/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2822FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2823/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2824FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2825/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2826FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2827
2828/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2829FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2830/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2831FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2832/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2833FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2834/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2835FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2836
2837/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2838FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2839/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2840FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2841/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2842FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2843/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2844FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2845
2846/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2847FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2848/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2849FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2850/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2851FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2852/* Opcode VEX.F2.0F 0x5b - invalid */
2853
2854/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2855FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2856/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2857FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2858/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2859FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2860/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2861FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2862
2863/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2864FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2865/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2866FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2867/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2868FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2869/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2870FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2871
2872/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2873FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2874/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2875FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2876/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2877FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2878/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2879FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2880
2881/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2882FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2883/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2884FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2885/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2886FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2887/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2888FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2889
2890
2891/* Opcode VEX.0F 0x60 - invalid */
2892
2893
2894/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2895FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2896{
2897 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2898 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2899 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2900}
2901
2902
2903/* Opcode VEX.F3.0F 0x60 - invalid */
2904
2905
2906/* Opcode VEX.0F 0x61 - invalid */
2907
2908
2909/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2910FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2911{
2912 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2913 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2914 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2915}
2916
2917
2918/* Opcode VEX.F3.0F 0x61 - invalid */
2919
2920
2921/* Opcode VEX.0F 0x62 - invalid */
2922
2923/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2924FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2925{
2926 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2927 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2928 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2929}
2930
2931
2932/* Opcode VEX.F3.0F 0x62 - invalid */
2933
2934
2935
2936/* Opcode VEX.0F 0x63 - invalid */
2937
2938
2939/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2940FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2941{
2942 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2943 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2944 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2945}
2946
2947
2948/* Opcode VEX.F3.0F 0x63 - invalid */
2949
2950/* Opcode VEX.0F 0x64 - invalid */
2951
2952
2953/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2954FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2955{
2956 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2957 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2958 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2959}
2960
2961
2962/* Opcode VEX.F3.0F 0x64 - invalid */
2963
2964/* Opcode VEX.0F 0x65 - invalid */
2965
2966
2967/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2968FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2969{
2970 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2971 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
2972 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2973}
2974
2975
2976/* Opcode VEX.F3.0F 0x65 - invalid */
2977
2978/* Opcode VEX.0F 0x66 - invalid */
2979
2980
2981/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
2982FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
2983{
2984 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2985 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
2986 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2987}
2988
2989
2990/* Opcode VEX.F3.0F 0x66 - invalid */
2991
2992/* Opcode VEX.0F 0x67 - invalid */
2993
2994
2995/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, W */
2996FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
2997{
2998 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2999 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3000 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3001}
3002
3003
3004/* Opcode VEX.F3.0F 0x67 - invalid */
3005
3006
3007///**
3008// * Common worker for SSE2 instructions on the form:
3009// * pxxxx xmm1, xmm2/mem128
3010// *
3011// * The 2nd operand is the second half of a register, which in the memory case
3012// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3013// * where it may read the full 128 bits or only the upper 64 bits.
3014// *
3015// * Exceptions type 4.
3016// */
3017//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3018//{
3019// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3020// if (IEM_IS_MODRM_REG_MODE(bRm))
3021// {
3022// /*
3023// * Register, register.
3024// */
3025// IEM_MC_BEGIN(2, 0);
3026// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3027// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3028// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3029// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3030// IEM_MC_PREPARE_SSE_USAGE();
3031// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3032// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3033// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3034// IEM_MC_ADVANCE_RIP_AND_FINISH();
3035// IEM_MC_END();
3036// }
3037// else
3038// {
3039// /*
3040// * Register, memory.
3041// */
3042// IEM_MC_BEGIN(2, 2);
3043// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3044// IEM_MC_LOCAL(RTUINT128U, uSrc);
3045// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3046// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3047//
3048// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3049// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3050// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3051// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3052//
3053// IEM_MC_PREPARE_SSE_USAGE();
3054// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3055// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3056//
3057// IEM_MC_ADVANCE_RIP_AND_FINISH();
3058// IEM_MC_END();
3059// }
3060// return VINF_SUCCESS;
3061//}
3062
3063
3064/* Opcode VEX.0F 0x68 - invalid */
3065
3066/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3067FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3068{
3069 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3070 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3071 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3072}
3073
3074
3075/* Opcode VEX.F3.0F 0x68 - invalid */
3076
3077
3078/* Opcode VEX.0F 0x69 - invalid */
3079
3080
3081/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3082FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3083{
3084 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3085 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3086 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3087}
3088
3089
3090/* Opcode VEX.F3.0F 0x69 - invalid */
3091
3092
3093/* Opcode VEX.0F 0x6a - invalid */
3094
3095
3096/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, W */
3097FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3098{
3099 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3100 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3101 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3102}
3103
3104
3105/* Opcode VEX.F3.0F 0x6a - invalid */
3106
3107
3108/* Opcode VEX.0F 0x6b - invalid */
3109
3110
3111/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3112FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3113{
3114 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3115 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3116 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3117}
3118
3119
3120/* Opcode VEX.F3.0F 0x6b - invalid */
3121
3122
3123/* Opcode VEX.0F 0x6c - invalid */
3124
3125
3126/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3127FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3128{
3129 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3130 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3131 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3132}
3133
3134
3135/* Opcode VEX.F3.0F 0x6c - invalid */
3136/* Opcode VEX.F2.0F 0x6c - invalid */
3137
3138
3139/* Opcode VEX.0F 0x6d - invalid */
3140
3141
3142/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, W */
3143FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3144{
3145 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3146 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3147 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3148}
3149
3150
3151/* Opcode VEX.F3.0F 0x6d - invalid */
3152
3153
3154/* Opcode VEX.0F 0x6e - invalid */
3155
3156FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3157{
3158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3159 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3160 {
3161 /**
3162 * @opcode 0x6e
3163 * @opcodesub rex.w=1
3164 * @oppfx 0x66
3165 * @opcpuid avx
3166 * @opgroup og_avx_simdint_datamov
3167 * @opxcpttype 5
3168 * @optest 64-bit / op1=1 op2=2 -> op1=2
3169 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3170 */
3171 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3172 if (IEM_IS_MODRM_REG_MODE(bRm))
3173 {
3174 /* XMM, greg64 */
3175 IEM_MC_BEGIN(0, 1);
3176 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3177 IEM_MC_LOCAL(uint64_t, u64Tmp);
3178
3179 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3180 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3181
3182 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3183 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3184
3185 IEM_MC_ADVANCE_RIP_AND_FINISH();
3186 IEM_MC_END();
3187 }
3188 else
3189 {
3190 /* XMM, [mem64] */
3191 IEM_MC_BEGIN(0, 2);
3192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3193 IEM_MC_LOCAL(uint64_t, u64Tmp);
3194
3195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3196 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3197 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3198 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3199
3200 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3201 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3202
3203 IEM_MC_ADVANCE_RIP_AND_FINISH();
3204 IEM_MC_END();
3205 }
3206 }
3207 else
3208 {
3209 /**
3210 * @opdone
3211 * @opcode 0x6e
3212 * @opcodesub rex.w=0
3213 * @oppfx 0x66
3214 * @opcpuid avx
3215 * @opgroup og_avx_simdint_datamov
3216 * @opxcpttype 5
3217 * @opfunction iemOp_vmovd_q_Vy_Ey
3218 * @optest op1=1 op2=2 -> op1=2
3219 * @optest op1=0 op2=-42 -> op1=-42
3220 */
3221 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3222 if (IEM_IS_MODRM_REG_MODE(bRm))
3223 {
3224 /* XMM, greg32 */
3225 IEM_MC_BEGIN(0, 1);
3226 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3227 IEM_MC_LOCAL(uint32_t, u32Tmp);
3228
3229 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3230 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3231
3232 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3233 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3234
3235 IEM_MC_ADVANCE_RIP_AND_FINISH();
3236 IEM_MC_END();
3237 }
3238 else
3239 {
3240 /* XMM, [mem32] */
3241 IEM_MC_BEGIN(0, 2);
3242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3243 IEM_MC_LOCAL(uint32_t, u32Tmp);
3244
3245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3246 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3247 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3248 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3249
3250 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3251 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3252
3253 IEM_MC_ADVANCE_RIP_AND_FINISH();
3254 IEM_MC_END();
3255 }
3256 }
3257}
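/*
 * Note: the decoder maps VEX.W into IEM_OP_PRF_SIZE_REX_W, which is what
 * the test above keys off: W=1 gives the 64-bit vmovq Vq,Eq form, W=0 the
 * 32-bit vmovd Vd,Ed form, both zero extending the destination to VLMAX.
 */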
3258
3259
3260/* Opcode VEX.F3.0F 0x6e - invalid */
3261
3262
3263/* Opcode VEX.0F 0x6f - invalid */
3264
3265/**
3266 * @opcode 0x6f
3267 * @oppfx 0x66
3268 * @opcpuid avx
3269 * @opgroup og_avx_simdint_datamove
3270 * @opxcpttype 1
3271 * @optest op1=1 op2=2 -> op1=2
3272 * @optest op1=0 op2=-42 -> op1=-42
3273 */
3274FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3275{
3276 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3277 Assert(pVCpu->iem.s.uVexLength <= 1);
3278 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3279 if (IEM_IS_MODRM_REG_MODE(bRm))
3280 {
3281 /*
3282 * Register, register.
3283 */
3284 IEM_MC_BEGIN(0, 0);
3285 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3286
3287 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3288 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3289 if (pVCpu->iem.s.uVexLength == 0)
3290 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3291 IEM_GET_MODRM_RM(pVCpu, bRm));
3292 else
3293 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3294 IEM_GET_MODRM_RM(pVCpu, bRm));
3295 IEM_MC_ADVANCE_RIP_AND_FINISH();
3296 IEM_MC_END();
3297 }
3298 else if (pVCpu->iem.s.uVexLength == 0)
3299 {
3300 /*
3301 * Register, memory128.
3302 */
3303 IEM_MC_BEGIN(0, 2);
3304 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3305 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3306
3307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3308 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3309 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3310 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3311
3312 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3313 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3314
3315 IEM_MC_ADVANCE_RIP_AND_FINISH();
3316 IEM_MC_END();
3317 }
3318 else
3319 {
3320 /*
3321 * Register, memory256.
3322 */
3323 IEM_MC_BEGIN(0, 2);
3324 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3326
3327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3328 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3329 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3330 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3331
3332 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3333 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3334
3335 IEM_MC_ADVANCE_RIP_AND_FINISH();
3336 IEM_MC_END();
3337 }
3338}
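/*
 * Note: vmovdqa above uses the alignment checking fetchers (16/32 byte
 * alignment, else #GP), whereas the vmovdqu form below uses the plain
 * unaligned fetchers; the register to register paths are identical.
 */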
3339
3340/**
3341 * @opcode 0x6f
3342 * @oppfx 0xf3
3343 * @opcpuid avx
3344 * @opgroup og_avx_simdint_datamove
3345 * @opxcpttype 4UA
3346 * @optest op1=1 op2=2 -> op1=2
3347 * @optest op1=0 op2=-42 -> op1=-42
3348 */
3349FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3350{
3351 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3352 Assert(pVCpu->iem.s.uVexLength <= 1);
3353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3354 if (IEM_IS_MODRM_REG_MODE(bRm))
3355 {
3356 /*
3357 * Register, register.
3358 */
3359 IEM_MC_BEGIN(0, 0);
3360 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3361
3362 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3363 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3364 if (pVCpu->iem.s.uVexLength == 0)
3365 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3366 IEM_GET_MODRM_RM(pVCpu, bRm));
3367 else
3368 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3369 IEM_GET_MODRM_RM(pVCpu, bRm));
3370 IEM_MC_ADVANCE_RIP_AND_FINISH();
3371 IEM_MC_END();
3372 }
3373 else if (pVCpu->iem.s.uVexLength == 0)
3374 {
3375 /*
3376 * Register, memory128.
3377 */
3378 IEM_MC_BEGIN(0, 2);
3379 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3381
3382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3383 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3384 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3385 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3386
3387 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3388 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3389
3390 IEM_MC_ADVANCE_RIP_AND_FINISH();
3391 IEM_MC_END();
3392 }
3393 else
3394 {
3395 /*
3396 * Register, memory256.
3397 */
3398 IEM_MC_BEGIN(0, 2);
3399 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3401
3402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3403 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3404 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3405 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3406
3407 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3408 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3409
3410 IEM_MC_ADVANCE_RIP_AND_FINISH();
3411 IEM_MC_END();
3412 }
3413}
3414
3415
3416/* Opcode VEX.0F 0x70 - invalid */
3417
3418
3419/**
3420 * Common worker for AVX/AVX2 instructions on the forms:
3421 * - vpxxx xmm0, xmm2/mem128, imm8
3422 * - vpxxx ymm0, ymm2/mem256, imm8
3423 *
3424 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3425 */
3426FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3427{
3428 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3429 if (IEM_IS_MODRM_REG_MODE(bRm))
3430 {
3431 /*
3432 * Register, register.
3433 */
3434 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3435 if (pVCpu->iem.s.uVexLength)
3436 {
3437 IEM_MC_BEGIN(3, 2);
3438 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3439 IEM_MC_LOCAL(RTUINT256U, uDst);
3440 IEM_MC_LOCAL(RTUINT256U, uSrc);
3441 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3442 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3443 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3444 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3445 IEM_MC_PREPARE_AVX_USAGE();
3446 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3447 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3448 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3449 IEM_MC_ADVANCE_RIP_AND_FINISH();
3450 IEM_MC_END();
3451 }
3452 else
3453 {
3454 IEM_MC_BEGIN(3, 0);
3455 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3456 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3457 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3458 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3459 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3460 IEM_MC_PREPARE_AVX_USAGE();
3461 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3462 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3463 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3464 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3465 IEM_MC_ADVANCE_RIP_AND_FINISH();
3466 IEM_MC_END();
3467 }
3468 }
3469 else
3470 {
3471 /*
3472 * Register, memory.
3473 */
3474 if (pVCpu->iem.s.uVexLength)
3475 {
3476 IEM_MC_BEGIN(3, 3);
3477 IEM_MC_LOCAL(RTUINT256U, uDst);
3478 IEM_MC_LOCAL(RTUINT256U, uSrc);
3479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3480 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3481 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3482
3483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3484 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3485 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3486 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3487 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3488 IEM_MC_PREPARE_AVX_USAGE();
3489
3490 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3491 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3492 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3493
3494 IEM_MC_ADVANCE_RIP_AND_FINISH();
3495 IEM_MC_END();
3496 }
3497 else
3498 {
3499 IEM_MC_BEGIN(3, 1);
3500 IEM_MC_LOCAL(RTUINT128U, uSrc);
3501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3502 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3503 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3504
3505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3506 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3507 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3508 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3509 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3510 IEM_MC_PREPARE_AVX_USAGE();
3511
3512 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3513 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3514 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3515 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3516
3517 IEM_MC_ADVANCE_RIP_AND_FINISH();
3518 IEM_MC_END();
3519 }
3520 }
3521}
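/*
 * Informal note (editorial): for these shuffles each 2-bit field of the
 * immediate selects one source element. Taking vpshufd as an example,
 * dst.dw[i] = src.dw[(bImm >> (i * 2)) & 3], so bImm = 0x1b reverses the
 * four dwords of a lane; vpshuflw/vpshufhw apply the same scheme to the
 * low/high four words of each 128-bit lane and copy the other half as-is.
 */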
3522
3523
3524/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3525FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3526{
3527 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3528 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3529 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3530 }
3532
3533
3534/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3535FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3536{
3537 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3538 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3539 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3540 }
3542
3543
3544/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3545FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3546{
3547 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3548 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3549 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3550}
3551
3552
3553/* Opcode VEX.0F 0x71 11/2 - invalid. */
3554/** Opcode VEX.66.0F 0x71 11/2. */
3555FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3556
3557/* Opcode VEX.0F 0x71 11/4 - invalid */
3558/** Opcode VEX.66.0F 0x71 11/4. */
3559FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3560
3561/* Opcode VEX.0F 0x71 11/6 - invalid */
3562/** Opcode VEX.66.0F 0x71 11/6. */
3563FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3564
3565
3566/**
3567 * VEX Group 12 jump table for register variant.
3568 */
3569IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3570{
3571 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3572 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3573 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3574 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3575 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3576 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3577 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3578 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3579};
3580AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3581
3582
3583/** Opcode VEX.0F 0x71. */
3584FNIEMOP_DEF(iemOp_VGrp12)
3585{
3586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3587 if (IEM_IS_MODRM_REG_MODE(bRm))
3588 /* register, register */
3589 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3590 + pVCpu->iem.s.idxPrefix], bRm);
3591 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3592}
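/*
 * Informal note (editorial): the jump tables above are indexed as
 * modrm.reg * 4 + idxPrefix, where idxPrefix is 0/1/2/3 for no prefix,
 * 0x66, 0xf3 and 0xf2 respectively. E.g. VEX.66.0F 0x71 /6 lands on
 * iemOp_VGrp12_vpsllw_Hx_Ux_Ib, while every other prefix/reg combination
 * resolves to the invalid-opcode handler.
 */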
3593
3594
3595/* Opcode VEX.0F 0x72 11/2 - invalid. */
3596/** Opcode VEX.66.0F 0x72 11/2. */
3597FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3598
3599/* Opcode VEX.0F 0x72 11/4 - invalid. */
3600/** Opcode VEX.66.0F 0x72 11/4. */
3601FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3602
3603/* Opcode VEX.0F 0x72 11/6 - invalid. */
3604/** Opcode VEX.66.0F 0x72 11/6. */
3605FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3606
3607
3608/**
3609 * VEX Group 13 jump table for register variant.
3610 */
3611IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3612{
3613 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3614 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3615 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3616 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3617 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3618 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3619 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3620 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3621};
3622AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3623
3624/** Opcode VEX.0F 0x72. */
3625FNIEMOP_DEF(iemOp_VGrp13)
3626{
3627 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3628 if (IEM_IS_MODRM_REG_MODE(bRm))
3629 /* register, register */
3630 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3631 + pVCpu->iem.s.idxPrefix], bRm);
3632 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3633}
3634
3635
3636/* Opcode VEX.0F 0x73 11/2 - invalid. */
3637/** Opcode VEX.66.0F 0x73 11/2. */
3638FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3639
3640/** Opcode VEX.66.0F 0x73 11/3. */
3641FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3642
3643/* Opcode VEX.0F 0x73 11/6 - invalid. */
3644/** Opcode VEX.66.0F 0x73 11/6. */
3645FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3646
3647/** Opcode VEX.66.0F 0x73 11/7. */
3648FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3649
3650/**
3651 * VEX Group 14 jump table for register variant.
3652 */
3653IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3654{
3655 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3656 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3657 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3658 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3659 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3660 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3661 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3662 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3663};
3664AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3665
3666
3667/** Opcode VEX.0F 0x73. */
3668FNIEMOP_DEF(iemOp_VGrp14)
3669{
3670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3671 if (IEM_IS_MODRM_REG_MODE(bRm))
3672 /* register, register */
3673 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3674 + pVCpu->iem.s.idxPrefix], bRm);
3675 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3676}
3677
3678
3679/* Opcode VEX.0F 0x74 - invalid */
3680
3681
3682/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3683FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3684{
3685 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3686 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
3687 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3688}
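/*
 * Informal note (editorial): vpcmpeqb/w/d produce all-ones in each
 * destination element where the two source elements compare equal and
 * all-zeroes otherwise, e.g. per byte: dst.b[i] = (s1.b[i] == s2.b[i])
 * ? 0xff : 0x00. The same common worker handles both the 128-bit and
 * 256-bit forms via the pImpl function table.
 */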
3689
3690/* Opcode VEX.F3.0F 0x74 - invalid */
3691/* Opcode VEX.F2.0F 0x74 - invalid */
3692
3693
3694/* Opcode VEX.0F 0x75 - invalid */
3695
3696
3697/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3698FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3699{
3700 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3701 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3702 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3703}
3704
3705
3706/* Opcode VEX.F3.0F 0x75 - invalid */
3707/* Opcode VEX.F2.0F 0x75 - invalid */
3708
3709
3710/* Opcode VEX.0F 0x76 - invalid */
3711
3712
3713/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3714FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3715{
3716 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3717 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3718 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3719}
3720
3721
3722/* Opcode VEX.F3.0F 0x76 - invalid */
3723/* Opcode VEX.F2.0F 0x76 - invalid */
3724
3725
3726/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
3727FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
3728{
3729 Assert(pVCpu->iem.s.uVexLength <= 1);
3730 if (pVCpu->iem.s.uVexLength == 0)
3731 {
3732 /*
3733 * 128-bit: vzeroupper
3734 */
3735 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
3736 IEM_MC_BEGIN(0, 0);
3737
3738 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3739 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3740 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3741
3742 IEM_MC_CLEAR_YREG_128_UP(0);
3743 IEM_MC_CLEAR_YREG_128_UP(1);
3744 IEM_MC_CLEAR_YREG_128_UP(2);
3745 IEM_MC_CLEAR_YREG_128_UP(3);
3746 IEM_MC_CLEAR_YREG_128_UP(4);
3747 IEM_MC_CLEAR_YREG_128_UP(5);
3748 IEM_MC_CLEAR_YREG_128_UP(6);
3749 IEM_MC_CLEAR_YREG_128_UP(7);
3750
3751 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
3752 {
3753 IEM_MC_CLEAR_YREG_128_UP( 8);
3754 IEM_MC_CLEAR_YREG_128_UP( 9);
3755 IEM_MC_CLEAR_YREG_128_UP(10);
3756 IEM_MC_CLEAR_YREG_128_UP(11);
3757 IEM_MC_CLEAR_YREG_128_UP(12);
3758 IEM_MC_CLEAR_YREG_128_UP(13);
3759 IEM_MC_CLEAR_YREG_128_UP(14);
3760 IEM_MC_CLEAR_YREG_128_UP(15);
3761 }
3762
3763 IEM_MC_ADVANCE_RIP_AND_FINISH();
3764 IEM_MC_END();
3765 }
3766 else
3767 {
3768 /*
3769 * 256-bit: vzeroall
3770 */
3771 IEMOP_MNEMONIC(vzeroall, "vzeroall");
3772 IEM_MC_BEGIN(0, 1);
3773 IEM_MC_LOCAL(uint32_t, uZero);
3774
3775 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3776 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3777 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3778
3779 IEM_MC_ASSIGN(uZero, 0);
3780 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
3781 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
3782 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
3783 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
3784 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
3785 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
3786 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
3787 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
3788
3789 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
3790 {
3791 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
3792 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
3793 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
3794 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
3795 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
3796 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
3797 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
3798 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
3799 }
3800
3801 IEM_MC_ADVANCE_RIP_AND_FINISH();
3802 IEM_MC_END();
3803 }
3804}
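/*
 * Informal note (editorial): vzeroupper clears bits 255:128 of YMM0..7
 * (and YMM8..15 in 64-bit mode) while preserving the XMM halves, which
 * is what the IEM_MC_CLEAR_YREG_128_UP calls above implement. vzeroall
 * zeroes the full registers; storing a zero dword with the ZX_VLMAX
 * store achieves that because the store zero-extends up to VLMAX.
 */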
3805
3806
3807/* Opcode VEX.66.0F 0x77 - invalid */
3808/* Opcode VEX.F3.0F 0x77 - invalid */
3809/* Opcode VEX.F2.0F 0x77 - invalid */
3810
3811/* Opcode VEX.0F 0x78 - invalid */
3812/* Opcode VEX.66.0F 0x78 - invalid */
3813/* Opcode VEX.F3.0F 0x78 - invalid */
3814/* Opcode VEX.F2.0F 0x78 - invalid */
3815
3816/* Opcode VEX.0F 0x79 - invalid */
3817/* Opcode VEX.66.0F 0x79 - invalid */
3818/* Opcode VEX.F3.0F 0x79 - invalid */
3819/* Opcode VEX.F2.0F 0x79 - invalid */
3820
3821/* Opcode VEX.0F 0x7a - invalid */
3822/* Opcode VEX.66.0F 0x7a - invalid */
3823/* Opcode VEX.F3.0F 0x7a - invalid */
3824/* Opcode VEX.F2.0F 0x7a - invalid */
3825
3826/* Opcode VEX.0F 0x7b - invalid */
3827/* Opcode VEX.66.0F 0x7b - invalid */
3828/* Opcode VEX.F3.0F 0x7b - invalid */
3829/* Opcode VEX.F2.0F 0x7b - invalid */
3830
3831/* Opcode VEX.0F 0x7c - invalid */
3832/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
3833FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3834/* Opcode VEX.F3.0F 0x7c - invalid */
3835/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
3836FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3837
3838/* Opcode VEX.0F 0x7d - invalid */
3839/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
3840FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3841/* Opcode VEX.F3.0F 0x7d - invalid */
3842/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
3843FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3844
3845
3846/* Opcode VEX.0F 0x7e - invalid */
3847
3848FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3849{
3850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3851 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3852 {
3853 /**
3854 * @opcode 0x7e
3855 * @opcodesub rex.w=1
3856 * @oppfx 0x66
3857 * @opcpuid avx
3858 * @opgroup og_avx_simdint_datamov
3859 * @opxcpttype 5
3860 * @optest 64-bit / op1=1 op2=2 -> op1=2
3861 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3862 */
3863 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3864 if (IEM_IS_MODRM_REG_MODE(bRm))
3865 {
3866 /* greg64, XMM */
3867 IEM_MC_BEGIN(0, 1);
3868 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3869 IEM_MC_LOCAL(uint64_t, u64Tmp);
3870
3871 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3872 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3873
3874 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3875 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
3876
3877 IEM_MC_ADVANCE_RIP_AND_FINISH();
3878 IEM_MC_END();
3879 }
3880 else
3881 {
3882 /* [mem64], XMM */
3883 IEM_MC_BEGIN(0, 2);
3884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3885 IEM_MC_LOCAL(uint64_t, u64Tmp);
3886
3887 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3888 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3889 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3890 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3891
3892 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3893 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3894
3895 IEM_MC_ADVANCE_RIP_AND_FINISH();
3896 IEM_MC_END();
3897 }
3898 }
3899 else
3900 {
3901 /**
3902 * @opdone
3903 * @opcode 0x7e
3904 * @opcodesub rex.w=0
3905 * @oppfx 0x66
3906 * @opcpuid avx
3907 * @opgroup og_avx_simdint_datamov
3908 * @opxcpttype 5
3909 * @opfunction iemOp_vmovd_q_Ey_Vy
3910 * @optest op1=1 op2=2 -> op1=2
3911 * @optest op1=0 op2=-42 -> op1=-42
3912 */
3913 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3914 if (IEM_IS_MODRM_REG_MODE(bRm))
3915 {
3916 /* greg32, XMM */
3917 IEM_MC_BEGIN(0, 1);
3918 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3919 IEM_MC_LOCAL(uint32_t, u32Tmp);
3920
3921 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3922 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3923
3924 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3925 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
3926
3927 IEM_MC_ADVANCE_RIP_AND_FINISH();
3928 IEM_MC_END();
3929 }
3930 else
3931 {
3932 /* [mem32], XMM */
3933 IEM_MC_BEGIN(0, 2);
3934 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3935 IEM_MC_LOCAL(uint32_t, u32Tmp);
3936
3937 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3938 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3939 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3940 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3941
3942 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3943 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3944
3945 IEM_MC_ADVANCE_RIP_AND_FINISH();
3946 IEM_MC_END();
3947 }
3948 }
3949}
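/*
 * Informal note (editorial): for VEX.66.0F 0x7e the VEX.W bit (tracked
 * via the REX.W prefix flag tested above) selects the operand width:
 * W=1 gives vmovq r64/m64, xmm and W=0 gives vmovd r32/m32, xmm, both
 * reading only the low bits of the source XMM register.
 */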
3950
3951
3952/**
3953 * @opcode 0x7e
3954 * @oppfx 0xf3
3955 * @opcpuid avx
3956 * @opgroup og_avx_pcksclr_datamove
3957 * @opxcpttype none
3958 * @optest op1=1 op2=2 -> op1=2
3959 * @optest op1=0 op2=-42 -> op1=-42
3960 */
3961FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
3962{
3963 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3965 if (IEM_IS_MODRM_REG_MODE(bRm))
3966 {
3967 /*
3968 * Register, register.
3969 */
3970 IEM_MC_BEGIN(0, 0);
3971 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3972
3973 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3974 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3975
3976 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3977 IEM_GET_MODRM_RM(pVCpu, bRm));
3978 IEM_MC_ADVANCE_RIP_AND_FINISH();
3979 IEM_MC_END();
3980 }
3981 else
3982 {
3983 /*
3984 * Memory, register.
3985 */
3986 IEM_MC_BEGIN(0, 2);
3987 IEM_MC_LOCAL(uint64_t, uSrc);
3988 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3989
3990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3991 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3992 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3993 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3994
3995 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3996 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3997
3998 IEM_MC_ADVANCE_RIP_AND_FINISH();
3999 IEM_MC_END();
4000 }
4001 }
4002
4003/* Opcode VEX.F2.0F 0x7e - invalid */
4004
4005
4006/* Opcode VEX.0F 0x7f - invalid */
4007
4008/**
4009 * @opcode 0x7f
4010 * @oppfx 0x66
4011 * @opcpuid avx
4012 * @opgroup og_avx_simdint_datamove
4013 * @opxcpttype 1
4014 * @optest op1=1 op2=2 -> op1=2
4015 * @optest op1=0 op2=-42 -> op1=-42
4016 */
4017FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4018{
4019 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4020 Assert(pVCpu->iem.s.uVexLength <= 1);
4021 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4022 if (IEM_IS_MODRM_REG_MODE(bRm))
4023 {
4024 /*
4025 * Register, register.
4026 */
4027 IEM_MC_BEGIN(0, 0);
4028 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4029
4030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4031 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4032 if (pVCpu->iem.s.uVexLength == 0)
4033 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4034 IEM_GET_MODRM_REG(pVCpu, bRm));
4035 else
4036 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4037 IEM_GET_MODRM_REG(pVCpu, bRm));
4038 IEM_MC_ADVANCE_RIP_AND_FINISH();
4039 IEM_MC_END();
4040 }
4041 else if (pVCpu->iem.s.uVexLength == 0)
4042 {
4043 /*
4044 * Register, memory128.
4045 */
4046 IEM_MC_BEGIN(0, 2);
4047 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4049
4050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4051 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4052 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4053 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4054
4055 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4056 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4057
4058 IEM_MC_ADVANCE_RIP_AND_FINISH();
4059 IEM_MC_END();
4060 }
4061 else
4062 {
4063 /*
4064 * Register, memory256.
4065 */
4066 IEM_MC_BEGIN(0, 2);
4067 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4069
4070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4071 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4072 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4073 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4074
4075 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4076 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4077
4078 IEM_MC_ADVANCE_RIP_AND_FINISH();
4079 IEM_MC_END();
4080 }
4081}
4082
4083
4084/**
4085 * @opcode 0x7f
4086 * @oppfx 0xf3
4087 * @opcpuid avx
4088 * @opgroup og_avx_simdint_datamove
4089 * @opxcpttype 4UA
4090 * @optest op1=1 op2=2 -> op1=2
4091 * @optest op1=0 op2=-42 -> op1=-42
4092 */
4093FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4094{
4095 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4096 Assert(pVCpu->iem.s.uVexLength <= 1);
4097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4098 if (IEM_IS_MODRM_REG_MODE(bRm))
4099 {
4100 /*
4101 * Register, register.
4102 */
4103 IEM_MC_BEGIN(0, 0);
4104 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4105
4106 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4107 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4108 if (pVCpu->iem.s.uVexLength == 0)
4109 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4110 IEM_GET_MODRM_REG(pVCpu, bRm));
4111 else
4112 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4113 IEM_GET_MODRM_REG(pVCpu, bRm));
4114 IEM_MC_ADVANCE_RIP_AND_FINISH();
4115 IEM_MC_END();
4116 }
4117 else if (pVCpu->iem.s.uVexLength == 0)
4118 {
4119 /*
4120 * Register, memory128.
4121 */
4122 IEM_MC_BEGIN(0, 2);
4123 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4125
4126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4127 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4128 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4129 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4130
4131 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4132 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4133
4134 IEM_MC_ADVANCE_RIP_AND_FINISH();
4135 IEM_MC_END();
4136 }
4137 else
4138 {
4139 /*
4140 * Register, memory256.
4141 */
4142 IEM_MC_BEGIN(0, 2);
4143 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4145
4146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4147 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4148 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4149 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4150
4151 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4152 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4153
4154 IEM_MC_ADVANCE_RIP_AND_FINISH();
4155 IEM_MC_END();
4156 }
4157}
4158
4159/* Opcode VEX.F2.0F 0x7f - invalid */
4160
4161
4162/* Opcode VEX.0F 0x80 - invalid */
4163/* Opcode VEX.0F 0x81 - invalid */
4164/* Opcode VEX.0F 0x82 - invalid */
4165/* Opcode VEX.0F 0x83 - invalid */
4166/* Opcode VEX.0F 0x84 - invalid */
4167/* Opcode VEX.0F 0x85 - invalid */
4168/* Opcode VEX.0F 0x86 - invalid */
4169/* Opcode VEX.0F 0x87 - invalid */
4170/* Opcode VEX.0F 0x88 - invalid */
4171/* Opcode VEX.0F 0x89 - invalid */
4172/* Opcode VEX.0F 0x8a - invalid */
4173/* Opcode VEX.0F 0x8b - invalid */
4174/* Opcode VEX.0F 0x8c - invalid */
4175/* Opcode VEX.0F 0x8d - invalid */
4176/* Opcode VEX.0F 0x8e - invalid */
4177/* Opcode VEX.0F 0x8f - invalid */
4178/* Opcode VEX.0F 0x90 - invalid */
4179/* Opcode VEX.0F 0x91 - invalid */
4180/* Opcode VEX.0F 0x92 - invalid */
4181/* Opcode VEX.0F 0x93 - invalid */
4182/* Opcode VEX.0F 0x94 - invalid */
4183/* Opcode VEX.0F 0x95 - invalid */
4184/* Opcode VEX.0F 0x96 - invalid */
4185/* Opcode VEX.0F 0x97 - invalid */
4186/* Opcode VEX.0F 0x98 - invalid */
4187/* Opcode VEX.0F 0x99 - invalid */
4188/* Opcode VEX.0F 0x9a - invalid */
4189/* Opcode VEX.0F 0x9b - invalid */
4190/* Opcode VEX.0F 0x9c - invalid */
4191/* Opcode VEX.0F 0x9d - invalid */
4192/* Opcode VEX.0F 0x9e - invalid */
4193/* Opcode VEX.0F 0x9f - invalid */
4194/* Opcode VEX.0F 0xa0 - invalid */
4195/* Opcode VEX.0F 0xa1 - invalid */
4196/* Opcode VEX.0F 0xa2 - invalid */
4197/* Opcode VEX.0F 0xa3 - invalid */
4198/* Opcode VEX.0F 0xa4 - invalid */
4199/* Opcode VEX.0F 0xa5 - invalid */
4200/* Opcode VEX.0F 0xa6 - invalid */
4201/* Opcode VEX.0F 0xa7 - invalid */
4202/* Opcode VEX.0F 0xa8 - invalid */
4203/* Opcode VEX.0F 0xa9 - invalid */
4204/* Opcode VEX.0F 0xaa - invalid */
4205/* Opcode VEX.0F 0xab - invalid */
4206/* Opcode VEX.0F 0xac - invalid */
4207/* Opcode VEX.0F 0xad - invalid */
4208
4209
4210/* Opcode VEX.0F 0xae mem/0 - invalid. */
4211/* Opcode VEX.0F 0xae mem/1 - invalid. */
4212
4213/**
4214 * @ opmaps grp15
4215 * @ opcode !11/2
4216 * @ oppfx none
4217 * @ opcpuid sse
4218 * @ opgroup og_sse_mxcsrsm
4219 * @ opxcpttype 5
4220 * @ optest op1=0 -> mxcsr=0
4221 * @ optest op1=0x2083 -> mxcsr=0x2083
4222 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4223 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4224 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4225 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4226 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4227 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4228 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4229 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4230 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4231 */
4232FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4233//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4234//{
4235// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4236// IEM_MC_BEGIN(2, 0);
4237// IEM_MC_ARG(uint8_t, iEffSeg, 0);
4238// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4239// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4240// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4241// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4242// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4243// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4244// IEM_MC_END();
4245// return VINF_SUCCESS;
4246//}
4247
4248
4249/**
4250 * @opmaps vexgrp15
4251 * @opcode !11/3
4252 * @oppfx none
4253 * @opcpuid avx
4254 * @opgroup og_avx_mxcsrsm
4255 * @opxcpttype 5
4256 * @optest mxcsr=0 -> op1=0
4257 * @optest mxcsr=0x2083 -> op1=0x2083
4258 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4259 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4260 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4261 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4262 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4263 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4264 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4265 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4266 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4267 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4268 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4269 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4270 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4271 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4272 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4273 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4274 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4275 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4276 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4277 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4278 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4279 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4280 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4281 * -> value.xcpt=0x6
4282 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4283 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4284 * APMv4 rev 3.17 page 509.
4285 * @todo Test this instruction on AMD Ryzen.
4286 */
4287FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4288{
4289 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4290 IEM_MC_BEGIN(2, 0);
4291 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4292 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4294 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4295 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4296 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4297 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4298 IEM_MC_END();
4299}
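/*
 * Informal note (editorial): vstmxcsr only stores the 32-bit MXCSR to
 * the effective address, so the work is deferred to a C implementation
 * (iemCImpl_vstmxcsr) via IEM_MC_CALL_CIMPL_2; the IEM_CIMPL_F_FPU flag
 * tells the recompiler that the call touches FPU/SSE state.
 */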
4300
4301/* Opcode VEX.0F 0xae mem/4 - invalid. */
4302/* Opcode VEX.0F 0xae mem/5 - invalid. */
4303/* Opcode VEX.0F 0xae mem/6 - invalid. */
4304/* Opcode VEX.0F 0xae mem/7 - invalid. */
4305
4306/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4307/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4308/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4309/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4310/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4311/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4312/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4313/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4314
4315/**
4316 * VEX Group 15 jump table for memory variant.
4317 */
4318IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4319{ /* pfx: none, 066h, 0f3h, 0f2h */
4320 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4321 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4322 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4323 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4324 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4325 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4326 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4327 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4328};
4329AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4330
4331
4332/** Opcode VEX.0F 0xae. */
4333FNIEMOP_DEF(iemOp_VGrp15)
4334{
4335 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4336 if (IEM_IS_MODRM_REG_MODE(bRm))
4337 /* register, register */
4338 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4339
4340 /* memory, register */
4341 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4342 + pVCpu->iem.s.idxPrefix], bRm);
4343}
4344
4345
4346/* Opcode VEX.0F 0xaf - invalid. */
4347
4348/* Opcode VEX.0F 0xb0 - invalid. */
4349/* Opcode VEX.0F 0xb1 - invalid. */
4350/* Opcode VEX.0F 0xb2 - invalid. */
4352/* Opcode VEX.0F 0xb3 - invalid. */
4353/* Opcode VEX.0F 0xb4 - invalid. */
4354/* Opcode VEX.0F 0xb5 - invalid. */
4355/* Opcode VEX.0F 0xb6 - invalid. */
4356/* Opcode VEX.0F 0xb7 - invalid. */
4357/* Opcode VEX.0F 0xb8 - invalid. */
4358/* Opcode VEX.0F 0xb9 - invalid. */
4359/* Opcode VEX.0F 0xba - invalid. */
4360/* Opcode VEX.0F 0xbb - invalid. */
4361/* Opcode VEX.0F 0xbc - invalid. */
4362/* Opcode VEX.0F 0xbd - invalid. */
4363/* Opcode VEX.0F 0xbe - invalid. */
4364/* Opcode VEX.0F 0xbf - invalid. */
4365
4366/* Opcode VEX.0F 0xc0 - invalid. */
4367/* Opcode VEX.66.0F 0xc0 - invalid. */
4368/* Opcode VEX.F3.0F 0xc0 - invalid. */
4369/* Opcode VEX.F2.0F 0xc0 - invalid. */
4370
4371/* Opcode VEX.0F 0xc1 - invalid. */
4372/* Opcode VEX.66.0F 0xc1 - invalid. */
4373/* Opcode VEX.F3.0F 0xc1 - invalid. */
4374/* Opcode VEX.F2.0F 0xc1 - invalid. */
4375
4376/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4377FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4378/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4379FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4380/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4381FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4382/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4383FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4384
4385/* Opcode VEX.0F 0xc3 - invalid */
4386/* Opcode VEX.66.0F 0xc3 - invalid */
4387/* Opcode VEX.F3.0F 0xc3 - invalid */
4388/* Opcode VEX.F2.0F 0xc3 - invalid */
4389
4390/* Opcode VEX.0F 0xc4 - invalid */
4391
4392
4393/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4394FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4395{
4396 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4398 if (IEM_IS_MODRM_REG_MODE(bRm))
4399 {
4400 /*
4401 * Register, register.
4402 */
4403 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4404 IEM_MC_BEGIN(4, 0);
4405 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4406 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4407 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4408 IEM_MC_ARG(uint16_t, u16Src, 2);
4409 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4410 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4411 IEM_MC_PREPARE_AVX_USAGE();
4412 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4413 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4414 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4415 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4416 puDst, puSrc, u16Src, bImmArg);
4417 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4418 IEM_MC_ADVANCE_RIP_AND_FINISH();
4419 IEM_MC_END();
4420 }
4421 else
4422 {
4423 /*
4424 * Register, memory.
4425 */
4426 IEM_MC_BEGIN(4, 1);
4427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4428 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4429 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4430 IEM_MC_ARG(uint16_t, u16Src, 2);
4431
4432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4433 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4434 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4435 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4436 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4437 IEM_MC_PREPARE_AVX_USAGE();
4438
4439 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4440 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4441 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4442 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4443 puDst, puSrc, u16Src, bImmArg);
4444 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4445
4446 IEM_MC_ADVANCE_RIP_AND_FINISH();
4447 IEM_MC_END();
4448 }
4449}
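/*
 * Informal note (editorial): vpinsrw is encoded VEX RVM style, so the
 * unmodified source register comes from VEX.vvvv (hence the
 * IEM_GET_EFFECTIVE_VVVV references above) and modrm.rm supplies the
 * 16-bit GPR or memory word. Roughly: dst = src1, then
 * dst.w[bImm & 7] = u16Src, with bits 255:128 of the result cleared.
 */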
4450
4451
4452/* Opcode VEX.F3.0F 0xc4 - invalid */
4453/* Opcode VEX.F2.0F 0xc4 - invalid */
4454
4455 /* Opcode VEX.0F 0xc5 - invalid */
4456
4457
4458/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4459FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4460{
4461 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4463 if (IEM_IS_MODRM_REG_MODE(bRm))
4464 {
4465 /*
4466 * Register, register.
4467 */
4468 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4469 IEM_MC_BEGIN(3, 1);
4470 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4471 IEM_MC_LOCAL(uint16_t, u16Dst);
4472 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
4473 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4474 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4475 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4476 IEM_MC_PREPARE_AVX_USAGE();
4477 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4478 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
4479 pu16Dst, puSrc, bImmArg);
4480 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
4481 IEM_MC_ADVANCE_RIP_AND_FINISH();
4482 IEM_MC_END();
4483 }
4484 /* No memory operand. */
4485 else
4486 IEMOP_RAISE_INVALID_OPCODE_RET();
4487}
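/*
 * Informal note (editorial): vpextrw picks word bImm & 7 from the
 * source XMM register and zero-extends it into the destination GPR;
 * the register-only restriction mirrors the SDM, which defines no
 * memory form for this 0F 0xc5 encoding.
 */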
4488
4489
4490/* Opcode VEX.F3.0F 0xc5 - invalid */
4491/* Opcode VEX.F2.0F 0xc5 - invalid */
4492
4493
4494#define VSHUFP_X(a_Instr) \
4495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4496 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4497 { \
4498 /* \
4499 * Register, register. \
4500 */ \
4501 if (pVCpu->iem.s.uVexLength) \
4502 { \
4503 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4504 IEM_MC_BEGIN(4, 3); \
4505 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4506 IEM_MC_LOCAL(RTUINT256U, uDst); \
4507 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4508 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4509 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4510 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4511 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4512 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4513 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4514 IEM_MC_PREPARE_AVX_USAGE(); \
4515 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4516 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4517 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4518 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4519 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4520 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4521 IEM_MC_END(); \
4522 } \
4523 else \
4524 { \
4525 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4526 IEM_MC_BEGIN(4, 0); \
4527 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4528 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4529 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4530 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4531 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4532 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4533 IEM_MC_PREPARE_AVX_USAGE(); \
4534 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4535 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4536 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4537 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4538 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4539 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4540 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4541 IEM_MC_END(); \
4542 } \
4543 } \
4544 else \
4545 { \
4546 /* \
4547 * Register, memory. \
4548 */ \
4549 if (pVCpu->iem.s.uVexLength) \
4550 { \
4551 IEM_MC_BEGIN(4, 4); \
4552 IEM_MC_LOCAL(RTUINT256U, uDst); \
4553 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4554 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4556 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4557 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4558 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4560 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4561 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4562 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4563 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4564 IEM_MC_PREPARE_AVX_USAGE(); \
4565 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4566 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4567 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4568 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4569 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4570 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4571 IEM_MC_END(); \
4572 } \
4573 else \
4574 { \
4575 IEM_MC_BEGIN(4, 2); \
4576 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4578 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4579 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4580 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4582 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4583 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4584 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4585 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4586 IEM_MC_PREPARE_AVX_USAGE(); \
4587 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4588 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4589 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4590 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4591 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4592 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4593 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4594 IEM_MC_END(); \
4595 } \
4596 } \
4597 (void)0
4598
4599/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4600FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4601{
4602 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4603 VSHUFP_X(vshufps);
4604}
4605
4606
4607/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4608FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4609{
4610 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4611 VSHUFP_X(vshufpd);
4612}
4613#undef VSHUFP_X
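/*
 * Informal note (editorial): for vshufps the immediate holds four 2-bit
 * selectors per 128-bit lane: dst.dw[0]/dw[1] pick from src1 using
 * bImm[1:0] and bImm[3:2], while dst.dw[2]/dw[3] pick from src2 using
 * bImm[5:4] and bImm[7:6]. vshufpd uses one selector bit per qword in
 * the same src1-low/src2-high pattern.
 */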
4614
4615
4616/* Opcode VEX.F3.0F 0xc6 - invalid */
4617/* Opcode VEX.F2.0F 0xc6 - invalid */
4618
4619/* Opcode VEX.0F 0xc7 - invalid */
4620/* Opcode VEX.66.0F 0xc7 - invalid */
4621/* Opcode VEX.F3.0F 0xc7 - invalid */
4622/* Opcode VEX.F2.0F 0xc7 - invalid */
4623
4624/* Opcode VEX.0F 0xc8 - invalid */
4625/* Opcode VEX.0F 0xc9 - invalid */
4626/* Opcode VEX.0F 0xca - invalid */
4627/* Opcode VEX.0F 0xcb - invalid */
4628/* Opcode VEX.0F 0xcc - invalid */
4629/* Opcode VEX.0F 0xcd - invalid */
4630/* Opcode VEX.0F 0xce - invalid */
4631/* Opcode VEX.0F 0xcf - invalid */
4632
4633
4634/* Opcode VEX.0F 0xd0 - invalid */
4635/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4636FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4637/* Opcode VEX.F3.0F 0xd0 - invalid */
4638/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4639FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4640
4641/* Opcode VEX.0F 0xd1 - invalid */
4642/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
4643FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
4644/* Opcode VEX.F3.0F 0xd1 - invalid */
4645/* Opcode VEX.F2.0F 0xd1 - invalid */
4646
4647/* Opcode VEX.0F 0xd2 - invalid */
4648/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4649FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
4650/* Opcode VEX.F3.0F 0xd2 - invalid */
4651/* Opcode VEX.F2.0F 0xd2 - invalid */
4652
4653/* Opcode VEX.0F 0xd3 - invalid */
4654/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4655FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
4656/* Opcode VEX.F3.0F 0xd3 - invalid */
4657/* Opcode VEX.F2.0F 0xd3 - invalid */
4658
4659/* Opcode VEX.0F 0xd4 - invalid */
4660
4661
4662/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4663FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4664{
4665 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4666 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4667 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4668}
4669
4670
4671/* Opcode VEX.F3.0F 0xd4 - invalid */
4672/* Opcode VEX.F2.0F 0xd4 - invalid */
4673
4674/* Opcode VEX.0F 0xd5 - invalid */
4675
4676
4677/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4678FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4679{
4680 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4681 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4682 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4683}
4684
4685
4686/* Opcode VEX.F3.0F 0xd5 - invalid */
4687/* Opcode VEX.F2.0F 0xd5 - invalid */
4688
4689/* Opcode VEX.0F 0xd6 - invalid */
4690
4691/**
4692 * @opcode 0xd6
4693 * @oppfx 0x66
4694 * @opcpuid avx
4695 * @opgroup og_avx_pcksclr_datamove
4696 * @opxcpttype none
4697 * @optest op1=-1 op2=2 -> op1=2
4698 * @optest op1=0 op2=-42 -> op1=-42
4699 */
4700FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4701{
4702 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4704 if (IEM_IS_MODRM_REG_MODE(bRm))
4705 {
4706 /*
4707 * Register, register.
4708 */
4709 IEM_MC_BEGIN(0, 0);
4710 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4711
4712 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4713 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4714
4715 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4716 IEM_GET_MODRM_REG(pVCpu, bRm));
4717 IEM_MC_ADVANCE_RIP_AND_FINISH();
4718 IEM_MC_END();
4719 }
4720 else
4721 {
4722 /*
4723 * Memory, register.
4724 */
4725 IEM_MC_BEGIN(0, 2);
4726 IEM_MC_LOCAL(uint64_t, uSrc);
4727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4728
4729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4730 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4731 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4732 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4733
4734 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4735 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4736
4737 IEM_MC_ADVANCE_RIP_AND_FINISH();
4738 IEM_MC_END();
4739 }
4740}
4741
4742/* Opcode VEX.F3.0F 0xd6 - invalid */
4743/* Opcode VEX.F2.0F 0xd6 - invalid */
4744
4745
4746/* Opcode VEX.0F 0xd7 - invalid */
4747
4748/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4749FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4750{
4751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4752 /* Docs say register only. */
4753 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4754 {
4755 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4756 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
4757 if (pVCpu->iem.s.uVexLength)
4758 {
4759 IEM_MC_BEGIN(2, 1);
4760 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4761 IEM_MC_ARG(uint64_t *, puDst, 0);
4762 IEM_MC_LOCAL(RTUINT256U, uSrc);
4763 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4764 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4765 IEM_MC_PREPARE_AVX_USAGE();
4766 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4767 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4768 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4769 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4770 IEM_MC_ADVANCE_RIP_AND_FINISH();
4771 IEM_MC_END();
4772 }
4773 else
4774 {
4775 IEM_MC_BEGIN(2, 0);
4776 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4777 IEM_MC_ARG(uint64_t *, puDst, 0);
4778 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4779 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4780 IEM_MC_PREPARE_AVX_USAGE();
4781 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4782 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4783 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4784 IEM_MC_ADVANCE_RIP_AND_FINISH();
4785 IEM_MC_END();
4786 }
4787 }
4788 else
4789 IEMOP_RAISE_INVALID_OPCODE_RET();
4790}
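/*
 * Informal note (editorial): vpmovmskb gathers the most significant bit
 * of each source byte into the destination GPR, giving a 16-bit mask
 * for the 128-bit form and a 32-bit mask for the 256-bit form; writing
 * through the 64-bit GREG reference clears the remaining high bits.
 */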
4791
4792
4793/* Opcode VEX.F3.0F 0xd7 - invalid */
4794/* Opcode VEX.F2.0F 0xd7 - invalid */
4795
4796
4797/* Opcode VEX.0F 0xd8 - invalid */
4798
4799/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
4800FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
4801{
4802 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4803 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
4804 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4805}
4806
4807
4808/* Opcode VEX.F3.0F 0xd8 - invalid */
4809/* Opcode VEX.F2.0F 0xd8 - invalid */
4810
4811/* Opcode VEX.0F 0xd9 - invalid */
4812
4813
4814/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
4815FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
4816{
4817 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4818 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
4819 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4820}
4821
4822
4823/* Opcode VEX.F3.0F 0xd9 - invalid */
4824/* Opcode VEX.F2.0F 0xd9 - invalid */
4825
4826/* Opcode VEX.0F 0xda - invalid */
4827
4828
4829/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
4830FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
4831{
4832 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4833 IEMOPMEDIAF3_INIT_VARS(vpminub);
4834 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4835}
4836
4837
4838/* Opcode VEX.F3.0F 0xda - invalid */
4839/* Opcode VEX.F2.0F 0xda - invalid */
4840
4841/* Opcode VEX.0F 0xdb - invalid */
4842
4843
4844/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
4845FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
4846{
4847 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4848 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4849 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
4850}
4851
4852
4853/* Opcode VEX.F3.0F 0xdb - invalid */
4854/* Opcode VEX.F2.0F 0xdb - invalid */
4855
4856/* Opcode VEX.0F 0xdc - invalid */
4857
4858
4859/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
4860FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
4861{
4862 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4863 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
4864 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4865}
4866
4867
4868/* Opcode VEX.F3.0F 0xdc - invalid */
4869/* Opcode VEX.F2.0F 0xdc - invalid */
4870
4871/* Opcode VEX.0F 0xdd - invalid */
4872
4873
4874/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
4875FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
4876{
4877 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4878 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
4879 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4880}
4881
4882
4883/* Opcode VEX.F3.0F 0xdd - invalid */
4884/* Opcode VEX.F2.0F 0xdd - invalid */
4885
4886/* Opcode VEX.0F 0xde - invalid */
4887
4888
4889/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
4890FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
4891{
4892 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4893 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
4894 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4895}
4896
4897
4898/* Opcode VEX.F3.0F 0xde - invalid */
4899/* Opcode VEX.F2.0F 0xde - invalid */
4900
4901/* Opcode VEX.0F 0xdf - invalid */
4902
4903
4904/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
4905FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
4906{
4907 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4908 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4909 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
4910}
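/*
 * Informal note (editorial): mind the operand order for vpandn, it
 * complements the first source, i.e. dst = ~src1 & src2, unlike vpand,
 * vpor and vpxor which are symmetric in their sources.
 */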
4911
4912
4913/* Opcode VEX.F3.0F 0xdf - invalid */
4914/* Opcode VEX.F2.0F 0xdf - invalid */
4915
4916/* Opcode VEX.0F 0xe0 - invalid */
4917
4918
4919/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
4920FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
4921{
4922 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4923 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
4924 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4925}
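/*
 * Informal note (editorial): vpavgb computes the rounded unsigned
 * average per byte, dst.b[i] = (s1.b[i] + s2.b[i] + 1) >> 1, with the
 * intermediate sum kept wide enough not to overflow; vpavgw below does
 * the same per word.
 */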
4926
4927
4928/* Opcode VEX.F3.0F 0xe0 - invalid */
4929/* Opcode VEX.F2.0F 0xe0 - invalid */
4930
4931/* Opcode VEX.0F 0xe1 - invalid */
4932/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
4933FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
4934/* Opcode VEX.F3.0F 0xe1 - invalid */
4935/* Opcode VEX.F2.0F 0xe1 - invalid */
4936
4937/* Opcode VEX.0F 0xe2 - invalid */
4938/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
4939FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
4940/* Opcode VEX.F3.0F 0xe2 - invalid */
4941/* Opcode VEX.F2.0F 0xe2 - invalid */
4942
4943/* Opcode VEX.0F 0xe3 - invalid */
4944
4945
4946/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
4947FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
4948{
4949 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4950 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
4951 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4952}
4953
4954
4955/* Opcode VEX.F3.0F 0xe3 - invalid */
4956/* Opcode VEX.F2.0F 0xe3 - invalid */
4957
4958/* Opcode VEX.0F 0xe4 - invalid */
4959
4960
4961/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
4962FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
4963{
4964 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4965 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
4966 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4967}
4968
4969
4970/* Opcode VEX.F3.0F 0xe4 - invalid */
4971/* Opcode VEX.F2.0F 0xe4 - invalid */
4972
4973/* Opcode VEX.0F 0xe5 - invalid */
4974
4975
4976/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
4977FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
4978{
4979 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4980 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
4981 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4982}
4983
4984
4985/* Opcode VEX.F3.0F 0xe5 - invalid */
4986/* Opcode VEX.F2.0F 0xe5 - invalid */
4987
4988/* Opcode VEX.0F 0xe6 - invalid */
4989/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
4990FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
4991/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
4992FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
4993/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
4994FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
4995
4996
4997/* Opcode VEX.0F 0xe7 - invalid */
4998
4999/**
5000 * @opcode 0xe7
5001 * @opcodesub !11 mr/reg
5002 * @oppfx 0x66
5003 * @opcpuid avx
5004 * @opgroup og_avx_cachect
5005 * @opxcpttype 1
5006 * @optest op1=-1 op2=2 -> op1=2
5007 * @optest op1=0 op2=-42 -> op1=-42
5008 */
5009FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5010{
5011 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5012 Assert(pVCpu->iem.s.uVexLength <= 1);
5013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5014 if (IEM_IS_MODRM_MEM_MODE(bRm))
5015 {
5016 if (pVCpu->iem.s.uVexLength == 0)
5017 {
5018 /*
5019 * 128-bit: Memory, register.
5020 */
5021 IEM_MC_BEGIN(0, 2);
5022 IEM_MC_LOCAL(RTUINT128U, uSrc);
5023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5024
5025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5026 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5027 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5028 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5029
5030 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5031 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5032
5033 IEM_MC_ADVANCE_RIP_AND_FINISH();
5034 IEM_MC_END();
5035 }
5036 else
5037 {
5038 /*
5039 * 256-bit: Memory, register.
5040 */
5041 IEM_MC_BEGIN(0, 2);
5042 IEM_MC_LOCAL(RTUINT256U, uSrc);
5043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5044
5045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5046 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5047 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5048 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5049
5050 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5051 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5052
5053 IEM_MC_ADVANCE_RIP_AND_FINISH();
5054 IEM_MC_END();
5055 }
5056 }
5057 /**
5058 * @opdone
5059 * @opmnemonic udvex660fe7reg
5060 * @opcode 0xe7
5061 * @opcodesub 11 mr/reg
5062 * @oppfx 0x66
5063 * @opunused immediate
5064 * @opcpuid avx
5065 * @optest ->
5066 */
5067 else
5068 IEMOP_RAISE_INVALID_OPCODE_RET();
5069}
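/*
 * Informal note (editorial): vmovntdq is a non-temporal (streaming)
 * store, so the source register must go to a naturally aligned 16 or
 * 32 byte memory operand; the _ALIGN_SSE/_ALIGN_AVX store macros above
 * supply that alignment check.
 */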
5070
5071/* Opcode VEX.F3.0F 0xe7 - invalid */
5072/* Opcode VEX.F2.0F 0xe7 - invalid */
5073
5074
5075/* Opcode VEX.0F 0xe8 - invalid */
5076
5077
5078/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5079FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
5080{
5081 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F3.0F 0xe8 - invalid */
/*  Opcode VEX.F2.0F 0xe8 - invalid */

/* Opcode VEX.0F 0xe9 - invalid */


/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F3.0F 0xe9 - invalid */
/*  Opcode VEX.F2.0F 0xe9 - invalid */

/* Opcode VEX.0F 0xea - invalid */


/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpminsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F3.0F 0xea - invalid */
/*  Opcode VEX.F2.0F 0xea - invalid */

/* Opcode VEX.0F 0xeb - invalid */


/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
}



/*  Opcode VEX.F3.0F 0xeb - invalid */
/*  Opcode VEX.F2.0F 0xeb - invalid */

/* Opcode VEX.0F 0xec - invalid */


/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F3.0F 0xec - invalid */
/*  Opcode VEX.F2.0F 0xec - invalid */

/* Opcode VEX.0F 0xed - invalid */


/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F3.0F 0xed - invalid */
/*  Opcode VEX.F2.0F 0xed - invalid */

/* Opcode VEX.0F 0xee - invalid */


/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F3.0F 0xee - invalid */
/*  Opcode VEX.F2.0F 0xee - invalid */


/* Opcode VEX.0F 0xef - invalid */


/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
}


/*  Opcode VEX.F3.0F 0xef - invalid */
/*  Opcode VEX.F2.0F 0xef - invalid */

/*  Opcode VEX.0F 0xf0 - invalid */
/*  Opcode VEX.66.0F 0xf0 - invalid */


/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
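    /* Note: VLDDQU tolerates any operand alignment, so the plain (unaligned)
       fetch helpers are used below; only the register form is invalid. */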
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * Register, memory128.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory256.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, u256Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/*  Opcode VEX.0F 0xf1 - invalid */
/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
/*  Opcode VEX.F2.0F 0xf1 - invalid */

/*  Opcode VEX.0F 0xf2 - invalid */
/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/*  Opcode VEX.F2.0F 0xf2 - invalid */

/*  Opcode VEX.0F 0xf3 - invalid */
/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/*  Opcode VEX.F2.0F 0xf3 - invalid */

/*  Opcode VEX.0F 0xf4 - invalid */


/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
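    /* VPMULUDQ multiplies the low unsigned dword of each qword lane in the
       two sources, producing full 64-bit products. */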
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F2.0F 0xf4 - invalid */

/*  Opcode VEX.0F 0xf5 - invalid */
/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/*  Opcode VEX.F2.0F 0xf5 - invalid */

/*  Opcode VEX.0F 0xf6 - invalid */


/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
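    /* VPSADBW sums the absolute differences of eight unsigned byte pairs
       into the low word of each qword lane, zeroing the upper words. */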
    IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F2.0F 0xf6 - invalid */

/*  Opcode VEX.0F 0xf7 - invalid */
/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/*  Opcode VEX.F2.0F 0xf7 - invalid */

/*  Opcode VEX.0F 0xf8 - invalid */


/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F2.0F 0xf8 - invalid */

/*  Opcode VEX.0F 0xf9 - invalid */


/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F2.0F 0xf9 - invalid */

/*  Opcode VEX.0F 0xfa - invalid */


/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F2.0F 0xfa - invalid */

/*  Opcode VEX.0F 0xfb - invalid */


/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F2.0F 0xfb - invalid */

/*  Opcode VEX.0F 0xfc - invalid */


/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpaddb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F2.0F 0xfc - invalid */

/*  Opcode VEX.0F 0xfd - invalid */


/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F2.0F 0xfd - invalid */

/*  Opcode VEX.0F 0xfe - invalid */


/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/*  Opcode VEX.F2.0F 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_vud0)
{
    IEMOP_MNEMONIC(vud0, "vud0");
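    /* Intel documents UD0 as taking a ModR/M byte, so the operand bytes (and
       any effective address) are decoded before raising \#UD here; other
       vendors raise \#UD on the opcode alone. */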
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}



/**
 * VEX opcode map \#1.
 *
 * @sa g_apfnTwoByteMap
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
{
    /*          no prefix,                  066h prefix,                f3h prefix,                 f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
};
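/* 256 opcode entries times the four prefix columns (none, 066h, 0f3h, 0f2h). */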
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
/** @} */