VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@103776

Last change on this file since 103776 was 103696, checked in by vboxsync, 9 months ago:

VMM/IEM: Implement vpmaddwd instruction dispatch & emulation, bugref:9898

/* $Id: IEMAllInstVexMap1.cpp.h 103696 2024-03-06 07:13:30Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1, xmm2/mem128
 *     - vpxxx ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
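
/*
 * A minimal dispatch sketch (illustrative only): opcode handlers typically
 * feed the worker above a per-instruction function table, the way the
 * vunpcklps handler further down feeds iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc.
 * The vpxxx mnemonic below is hypothetical, and IEMOPMEDIAF3_INIT_VARS is
 * assumed to exist by analogy with IEMOPMEDIAOPTF3_INIT_VARS; the #if 0
 * keeps the sketch out of the build.
 */
#if 0
FNIEMOP_DEF(iemOp_vpxxx_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPXXX, vpxxx, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAF3_INIT_VARS(vpxxx); /* assumed helper: declares s_Host/s_Fallback IEMOPMEDIAF3 tables */
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
#endif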


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1, xmm2/mem128
 *     - vpxxx ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 4, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1/mem128
 *     - vpxxx ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(2, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
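
/*
 * Illustrative sketch only: dispatching into the two-operand worker above.
 * The vpxxx name and IEMOPMEDIAOPTF2_INIT_VARS helper are assumptions made
 * by analogy with the IEMOPMEDIAOPTF3_INIT_VARS pattern used by the
 * vunpcklps/vunpcklpd handlers in this file; #if 0 keeps it out of the build.
 */
#if 0
FNIEMOP_DEF(iemOp_vpxxx_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPXXX, vpxxx, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpxxx); /* assumed helper: declares s_Host/s_Fallback IEMOPMEDIAOPTF2 tables */
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
#endif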


/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
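
/*
 * Plain-C sketch of what the two vmovss forms above compute (matching the
 * IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX / IEM_MC_STORE_YREG_U32_ZX_VLMAX
 * semantics as far as the XMM part goes; the upper YMM bits are zeroed in
 * both cases).  The helper names are hypothetical; #if 0 keeps this sketch
 * out of the build.
 */
#if 0
static void vmovssRegFormSketch(PRTUINT128U puDst, PCRTUINT128U puHss, PCRTUINT128U puUss)
{
    puDst->au32[0] = puUss->au32[0]; /* dword 0 from the U (mod r/m) register, */
    puDst->au32[1] = puHss->au32[1]; /* dwords 3:1 from the VEX.vvvv register. */
    puDst->au32[2] = puHss->au32[2];
    puDst->au32[3] = puHss->au32[3];
}

static void vmovssMemFormSketch(PRTUINT128U puDst, uint32_t const uSrc)
{
    puDst->au32[0] = uSrc; /* the memory form instead zero extends the loaded */
    puDst->au32[1] = 0;    /* dword through the whole destination register.   */
    puDst->au32[2] = 0;
    puDst->au32[3] = 0;
}
#endif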


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Wss_Hss_Vss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         * @optest op2=0x2200220122022203
         *         op3=0x3304330533063307
         *         -> op1=0x22002201220222033304330533063307
         * @optest op2=-1 op3=-42 -> op1=-42
         * @note op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest op1=1 op2=0 op3=0 -> op1=0
         * @optest op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
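
/*
 * Plain-C sketch of the two VEX.0F 0x12 forms handled above, mirroring the
 * IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX and _U64LOCAL_U64HI_ZX_VLMAX
 * semantics (the upper YMM bits are zeroed in both cases).  Helper names
 * are hypothetical; #if 0 keeps this sketch out of the build.
 */
#if 0
static void vmovhlpsSketch(PRTUINT128U puDst, PCRTUINT128U puHq, PCRTUINT128U puUq)
{
    puDst->au64[0] = puUq->au64[1]; /* low qword  = high qword of the r/m register, */
    puDst->au64[1] = puHq->au64[1]; /* high qword = high qword of VEX.vvvv.         */
}

static void vmovlpsSketch(PRTUINT128U puDst, PCRTUINT128U puHq, uint64_t const uSrcMem)
{
    puDst->au64[0] = uSrcMem;       /* low qword  = the 64-bit memory operand,      */
    puDst->au64[1] = puHq->au64[1]; /* high qword = high qword of VEX.vvvv.         */
}
#endif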


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 * @optest op2=0 op3=2 -> op1=2
 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *         -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *         -> op1=0x00000002000000020000000100000001
 * @optest vex.l==1 /
 *         op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *         -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}

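/*
 * Sketch of the dword selection vmovsldup performs per 128-bit lane, matching
 * the four IEM_MC_STORE_XREG_U32_U128 invocations above: the even-indexed
 * source dwords are duplicated.  Hypothetical helper, kept out of the build.
 */
#if 0
static void vmovsldupLaneSketch(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au32[0] = puSrc->au32[0];
    puDst->au32[1] = puSrc->au32[0];
    puDst->au32[2] = puSrc->au32[2];
    puDst->au32[3] = puSrc->au32[2];
}
#endif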


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *         -> op1=0x22222222111111112222222211111111
 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *         -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
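
/*
 * Sketch of the 128-bit vmovddup result, matching the two
 * IEM_MC_STORE_XREG_U64 invocations above: the low source qword is duplicated
 * into both destination qwords.  Hypothetical helper, kept out of the build.
 */
#if 0
static void vmovddupSketch(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] = puSrc->au64[0];
    puDst->au64[1] = puSrc->au64[0];
}
#endif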


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */


/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x15 - invalid */
/* Opcode VEX.F2.0F 0x15 - invalid */


FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 */
FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 */
FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
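
/*
 * Sketch of the vmovshdup dword selection per 128-bit lane: the mirror image
 * of vmovsldup, duplicating the odd-indexed source dwords (matches the
 * IEM_MC_STORE_XREG_U32_U128 invocations above).  Hypothetical helper, kept
 * out of the build.
 */
#if 0
static void vmovshdupLaneSketch(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au32[0] = puSrc->au32[1];
    puDst->au32[1] = puSrc->au32[1];
    puDst->au32[2] = puSrc->au32[3];
    puDst->au32[3] = puSrc->au32[3];
}
#endif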


/* Opcode VEX.F2.0F 0x16 - invalid */


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/* Opcode VEX.F3.0F 0x17 - invalid */
/* Opcode VEX.F2.0F 0x17 - invalid */


/* Opcode VEX.0F 0x18 - invalid */
/* Opcode VEX.0F 0x19 - invalid */
/* Opcode VEX.0F 0x1a - invalid */
/* Opcode VEX.0F 0x1b - invalid */
/* Opcode VEX.0F 0x1c - invalid */
/* Opcode VEX.0F 0x1d - invalid */
/* Opcode VEX.0F 0x1e - invalid */
/* Opcode VEX.0F 0x1f - invalid */

/* Opcode VEX.0F 0x20 - invalid */
/* Opcode VEX.0F 0x21 - invalid */
/* Opcode VEX.0F 0x22 - invalid */
/* Opcode VEX.0F 0x23 - invalid */
/* Opcode VEX.0F 0x24 - invalid */
/* Opcode VEX.0F 0x25 - invalid */
/* Opcode VEX.0F 0x26 - invalid */
/* Opcode VEX.0F 0x27 - invalid */

1768/**
1769 * @opcode 0x28
1770 * @oppfx none
1771 * @opcpuid avx
1772 * @opgroup og_avx_pcksclr_datamove
1773 * @opxcpttype 1
1774 * @optest op1=1 op2=2 -> op1=2
1775 * @optest op1=0 op2=-42 -> op1=-42
1776 * @note Almost identical to vmovapd.
1777 */
1778FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1779{
1780 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1781 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1782 Assert(pVCpu->iem.s.uVexLength <= 1);
1783 if (IEM_IS_MODRM_REG_MODE(bRm))
1784 {
1785 /*
1786 * Register, register.
1787 */
1788 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1789 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1790
1791 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1792 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1793 if (pVCpu->iem.s.uVexLength == 0)
1794 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1795 IEM_GET_MODRM_RM(pVCpu, bRm));
1796 else
1797 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1798 IEM_GET_MODRM_RM(pVCpu, bRm));
1799 IEM_MC_ADVANCE_RIP_AND_FINISH();
1800 IEM_MC_END();
1801 }
1802 else
1803 {
1804 /*
1805 * Register, memory.
1806 */
1807 if (pVCpu->iem.s.uVexLength == 0)
1808 {
1809 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1810 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1811 IEM_MC_LOCAL(RTUINT128U, uSrc);
1812
1813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1814 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1815 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1816 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1817
1818 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1819 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1820
1821 IEM_MC_ADVANCE_RIP_AND_FINISH();
1822 IEM_MC_END();
1823 }
1824 else
1825 {
1826 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1828 IEM_MC_LOCAL(RTUINT256U, uSrc);
1829
1830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1831 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1832 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1833 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1834
1835 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1836 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1837
1838 IEM_MC_ADVANCE_RIP_AND_FINISH();
1839 IEM_MC_END();
1840 }
1841 }
1842}
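/*
 * The _ALIGN_SSE/_ALIGN_AVX fetchers above are what distinguish vmovaps from
 * vmovups: a misaligned operand must fault. A hedged sketch of the check the
 * 128-bit helper implies (the real helper also considers alignment-check
 * state before raising the exception):
 */
//if (GCPtrEffSrc & 15)
//    return iemRaiseGeneralProtectionFault0(pVCpu); /* #GP(0) on misalignment */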
1843
1844
1845/**
1846 * @opcode 0x28
1847 * @oppfx 66
1848 * @opcpuid avx
1849 * @opgroup og_avx_pcksclr_datamove
1850 * @opxcpttype 1
1851 * @optest op1=1 op2=2 -> op1=2
1852 * @optest op1=0 op2=-42 -> op1=-42
1853 * @note Almost identical to vmovaps.
1854 */
1855FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1856{
1857 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1858 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1859 Assert(pVCpu->iem.s.uVexLength <= 1);
1860 if (IEM_IS_MODRM_REG_MODE(bRm))
1861 {
1862 /*
1863 * Register, register.
1864 */
1865 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1866 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1867
1868 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1869 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1870 if (pVCpu->iem.s.uVexLength == 0)
1871 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1872 IEM_GET_MODRM_RM(pVCpu, bRm));
1873 else
1874 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1875 IEM_GET_MODRM_RM(pVCpu, bRm));
1876 IEM_MC_ADVANCE_RIP_AND_FINISH();
1877 IEM_MC_END();
1878 }
1879 else
1880 {
1881 /*
1882 * Register, memory.
1883 */
1884 if (pVCpu->iem.s.uVexLength == 0)
1885 {
1886 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1887 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1888 IEM_MC_LOCAL(RTUINT128U, uSrc);
1889
1890 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1891 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1892 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1893 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1894
1895 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1896 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1897
1898 IEM_MC_ADVANCE_RIP_AND_FINISH();
1899 IEM_MC_END();
1900 }
1901 else
1902 {
1903 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1905 IEM_MC_LOCAL(RTUINT256U, uSrc);
1906
1907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1908 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1909 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1910 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1911
1912 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1913 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1914
1915 IEM_MC_ADVANCE_RIP_AND_FINISH();
1916 IEM_MC_END();
1917 }
1918 }
1919}
1920
1921/**
1922 * @opmnemonic udvexf30f28
1923 * @opcode 0x28
1924 * @oppfx 0xf3
1925 * @opunused vex.modrm
1926 * @opcpuid avx
1927 * @optest ->
1928 * @opdone
1929 */
1930
1931/**
1932 * @opmnemonic udvexf20f28
1933 * @opcode 0x28
1934 * @oppfx 0xf2
1935 * @opunused vex.modrm
1936 * @opcpuid avx
1937 * @optest ->
1938 * @opdone
1939 */
1940
1941/**
1942 * @opcode 0x29
1943 * @oppfx none
1944 * @opcpuid avx
1945 * @opgroup og_avx_pcksclr_datamove
1946 * @opxcpttype 1
1947 * @optest op1=1 op2=2 -> op1=2
1948 * @optest op1=0 op2=-42 -> op1=-42
1949 * @note Almost identical to vmovapd.
1950 */
1951FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1952{
1953 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1955 Assert(pVCpu->iem.s.uVexLength <= 1);
1956 if (IEM_IS_MODRM_REG_MODE(bRm))
1957 {
1958 /*
1959 * Register, register.
1960 */
1961 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1962 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1963
1964 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1965 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1966 if (pVCpu->iem.s.uVexLength == 0)
1967 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1968 IEM_GET_MODRM_REG(pVCpu, bRm));
1969 else
1970 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1971 IEM_GET_MODRM_REG(pVCpu, bRm));
1972 IEM_MC_ADVANCE_RIP_AND_FINISH();
1973 IEM_MC_END();
1974 }
1975 else
1976 {
1977 /*
1978 * Register, memory.
1979 */
1980 if (pVCpu->iem.s.uVexLength == 0)
1981 {
1982 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1984 IEM_MC_LOCAL(RTUINT128U, uSrc);
1985
1986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1987 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1988 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1989 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1990
1991 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1992 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1993
1994 IEM_MC_ADVANCE_RIP_AND_FINISH();
1995 IEM_MC_END();
1996 }
1997 else
1998 {
1999 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2001 IEM_MC_LOCAL(RTUINT256U, uSrc);
2002
2003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2004 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2005 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2006 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2007
2008 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2009 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2010
2011 IEM_MC_ADVANCE_RIP_AND_FINISH();
2012 IEM_MC_END();
2013 }
2014 }
2015}
2016
2017/**
2018 * @opcode 0x29
2019 * @oppfx 66
2020 * @opcpuid avx
2021 * @opgroup og_avx_pcksclr_datamove
2022 * @opxcpttype 1
2023 * @optest op1=1 op2=2 -> op1=2
2024 * @optest op1=0 op2=-42 -> op1=-42
2025 * @note Almost identical to vmovaps.
2026 */
2027FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2028{
2029 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2030 Assert(pVCpu->iem.s.uVexLength <= 1);
2031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2032 if (IEM_IS_MODRM_REG_MODE(bRm))
2033 {
2034 /*
2035 * Register, register.
2036 */
2037 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
2038 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2039
2040 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2041 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2042 if (pVCpu->iem.s.uVexLength == 0)
2043 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2044 IEM_GET_MODRM_REG(pVCpu, bRm));
2045 else
2046 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2047 IEM_GET_MODRM_REG(pVCpu, bRm));
2048 IEM_MC_ADVANCE_RIP_AND_FINISH();
2049 IEM_MC_END();
2050 }
2051 else
2052 {
2053 /*
2054 * Register, memory.
2055 */
2056 if (pVCpu->iem.s.uVexLength == 0)
2057 {
2058 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2060 IEM_MC_LOCAL(RTUINT128U, uSrc);
2061
2062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2063 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2065 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2066
2067 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2068 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2069
2070 IEM_MC_ADVANCE_RIP_AND_FINISH();
2071 IEM_MC_END();
2072 }
2073 else
2074 {
2075 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2077 IEM_MC_LOCAL(RTUINT256U, uSrc);
2078
2079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2080 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2083
2084 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2085 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2086
2087 IEM_MC_ADVANCE_RIP_AND_FINISH();
2088 IEM_MC_END();
2089 }
2090 }
2091}
2092
2093
2094/**
2095 * @opmnemonic udvexf30f29
2096 * @opcode 0x29
2097 * @oppfx 0xf3
2098 * @opunused vex.modrm
2099 * @opcpuid avx
2100 * @optest ->
2101 * @opdone
2102 */
2103
2104/**
2105 * @opmnemonic udvexf20f29
2106 * @opcode 0x29
2107 * @oppfx 0xf2
2108 * @opunused vex.modrm
2109 * @opcpuid avx
2110 * @optest ->
2111 * @opdone
2112 */
2113
2114
2115/** Opcode VEX.0F 0x2a - invalid */
2116/** Opcode VEX.66.0F 0x2a - invalid */
2117/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2118FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2119/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2120FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2121
2122
2123/**
2124 * @opcode 0x2b
2125 * @opcodesub !11 mr/reg
2126 * @oppfx none
2127 * @opcpuid avx
2128 * @opgroup og_avx_cachect
2129 * @opxcpttype 1
2130 * @optest op1=1 op2=2 -> op1=2
2131 * @optest op1=0 op2=-42 -> op1=-42
2132 * @note Identical implementation to vmovntpd.
2133 */
2134FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2135{
2136 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2137 Assert(pVCpu->iem.s.uVexLength <= 1);
2138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2139 if (IEM_IS_MODRM_MEM_MODE(bRm))
2140 {
2141 /*
2142 * Memory, register.
2143 */
2144 if (pVCpu->iem.s.uVexLength == 0)
2145 {
2146 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2147 IEM_MC_LOCAL(RTUINT128U, uSrc);
2148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2149
2150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2151 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2152 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2153 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2154
2155 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2156 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2157
2158 IEM_MC_ADVANCE_RIP_AND_FINISH();
2159 IEM_MC_END();
2160 }
2161 else
2162 {
2163 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2164 IEM_MC_LOCAL(RTUINT256U, uSrc);
2165 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2166
2167 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2168 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2169 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2170 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2171
2172 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2173 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2174
2175 IEM_MC_ADVANCE_RIP_AND_FINISH();
2176 IEM_MC_END();
2177 }
2178 }
2179 /* The register, register encoding is invalid. */
2180 else
2181 IEMOP_RAISE_INVALID_OPCODE_RET();
2182}
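/*
 * Guest code typically reaches vmovntps via the streaming-store intrinsic;
 * an illustrative guest-side snippet (not IEM code, names hypothetical):
 */
//#include <immintrin.h>
//static void StreamStore4f(float *pDst /* 16-byte aligned */, __m128 vVal)
//{
//    _mm_stream_ps(pDst, vVal); /* typically assembles to [v]movntps [pDst], vVal */
//}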
2183
2184/**
2185 * @opcode 0x2b
2186 * @opcodesub !11 mr/reg
2187 * @oppfx 0x66
2188 * @opcpuid avx
2189 * @opgroup og_avx_cachect
2190 * @opxcpttype 1
2191 * @optest op1=1 op2=2 -> op1=2
2192 * @optest op1=0 op2=-42 -> op1=-42
2193 * @note Identical implementation to vmovntps.
2194 */
2195FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2196{
2197 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2198 Assert(pVCpu->iem.s.uVexLength <= 1);
2199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2200 if (IEM_IS_MODRM_MEM_MODE(bRm))
2201 {
2202 /*
2203 * Memory, register.
2204 */
2205 if (pVCpu->iem.s.uVexLength == 0)
2206 {
2207 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2208 IEM_MC_LOCAL(RTUINT128U, uSrc);
2209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2210
2211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2212 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2213 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2214 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2215
2216 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2217 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2218
2219 IEM_MC_ADVANCE_RIP_AND_FINISH();
2220 IEM_MC_END();
2221 }
2222 else
2223 {
2224 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2225 IEM_MC_LOCAL(RTUINT256U, uSrc);
2226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2227
2228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2229 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2232
2233 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2234 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2235
2236 IEM_MC_ADVANCE_RIP_AND_FINISH();
2237 IEM_MC_END();
2238 }
2239 }
2240 /* The register, register encoding is invalid. */
2241 else
2242 IEMOP_RAISE_INVALID_OPCODE_RET();
2243}
2244
2245/**
2246 * @opmnemonic udvexf30f2b
2247 * @opcode 0x2b
2248 * @oppfx 0xf3
2249 * @opunused vex.modrm
2250 * @opcpuid avx
2251 * @optest ->
2252 * @opdone
2253 */
2254
2255/**
2256 * @opmnemonic udvexf20f2b
2257 * @opcode 0x2b
2258 * @oppfx 0xf2
2259 * @opunused vex.modrm
2260 * @opcpuid avx
2261 * @optest ->
2262 * @opdone
2263 */
2264
2265
2266/* Opcode VEX.0F 0x2c - invalid */
2267/* Opcode VEX.66.0F 0x2c - invalid */
2268/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2269FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2270/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2271FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2272
2273/* Opcode VEX.0F 0x2d - invalid */
2274/* Opcode VEX.66.0F 0x2d - invalid */
2275/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2276FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2277/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2278FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2279
2280
2281/**
2282 * @opcode 0x2e
2283 * @oppfx none
2284 * @opflmodify cf,pf,af,zf,sf,of
2285 * @opflclear af,sf,of
2286 */
2287FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2288{
2289 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2291 if (IEM_IS_MODRM_REG_MODE(bRm))
2292 {
2293 /*
2294 * Register, register.
2295 */
2296 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2297 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2298 IEM_MC_LOCAL(uint32_t, fEFlags);
2299 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2300 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2301 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2302 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2303 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2304 IEM_MC_PREPARE_AVX_USAGE();
2305 IEM_MC_FETCH_EFLAGS(fEFlags);
2306 IEM_MC_REF_MXCSR(pfMxcsr);
2307 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2308 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2309 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2310 pfMxcsr, pEFlags, puSrc1, puSrc2);
2311 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2312 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2313 } IEM_MC_ELSE() {
2314 IEM_MC_COMMIT_EFLAGS(fEFlags);
2315 } IEM_MC_ENDIF();
2316
2317 IEM_MC_ADVANCE_RIP_AND_FINISH();
2318 IEM_MC_END();
2319 }
2320 else
2321 {
2322 /*
2323 * Register, memory.
2324 */
2325 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2326 IEM_MC_LOCAL(uint32_t, fEFlags);
2327 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2328 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2329 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2330 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2331 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2332 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2333
2334 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2335 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2336 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2337 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2338
2339 IEM_MC_PREPARE_AVX_USAGE();
2340 IEM_MC_FETCH_EFLAGS(fEFlags);
2341 IEM_MC_REF_MXCSR(pfMxcsr);
2342 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2343 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2344 pfMxcsr, pEFlags, puSrc1, puSrc2);
2345 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2346 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2347 } IEM_MC_ELSE() {
2348 IEM_MC_COMMIT_EFLAGS(fEFlags);
2349 } IEM_MC_ENDIF();
2350
2351 IEM_MC_ADVANCE_RIP_AND_FINISH();
2352 IEM_MC_END();
2353 }
2354}
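/*
 * A hypothetical reference model of the EFLAGS result the vucomiss worker
 * computes (vcomiss differs only in also raising #IA on quiet NaNs):
 * unordered => ZF,PF,CF=1; less => CF=1; equal => ZF=1; greater => all clear.
 */
//static uint32_t RefUComISS(uint32_t fEFlags, float r32Src1, float r32Src2)
//{
//    fEFlags &= ~(uint32_t)(X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_AF | X86_EFL_SF | X86_EFL_OF);
//    if (r32Src1 != r32Src1 || r32Src2 != r32Src2)   /* NaN operand => unordered */
//        fEFlags |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
//    else if (r32Src1 < r32Src2)
//        fEFlags |= X86_EFL_CF;
//    else if (r32Src1 == r32Src2)
//        fEFlags |= X86_EFL_ZF;
//    return fEFlags;
//}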
2355
2356
2357/**
2358 * @opcode 0x2e
2359 * @oppfx 0x66
2360 * @opflmodify cf,pf,af,zf,sf,of
2361 * @opflclear af,sf,of
2362 */
2363FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2364{
2365 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2366 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2367 if (IEM_IS_MODRM_REG_MODE(bRm))
2368 {
2369 /*
2370 * Register, register.
2371 */
2372 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2373 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2374 IEM_MC_LOCAL(uint32_t, fEFlags);
2375 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2376 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2377 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2378 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2379 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2380 IEM_MC_PREPARE_AVX_USAGE();
2381 IEM_MC_FETCH_EFLAGS(fEFlags);
2382 IEM_MC_REF_MXCSR(pfMxcsr);
2383 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2384 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2385 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2386 pfMxcsr, pEFlags, puSrc1, puSrc2);
2387 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2388 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2389 } IEM_MC_ELSE() {
2390 IEM_MC_COMMIT_EFLAGS(fEFlags);
2391 } IEM_MC_ENDIF();
2392
2393 IEM_MC_ADVANCE_RIP_AND_FINISH();
2394 IEM_MC_END();
2395 }
2396 else
2397 {
2398 /*
2399 * Register, memory.
2400 */
2401 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2402 IEM_MC_LOCAL(uint32_t, fEFlags);
2403 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2404 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2405 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2406 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2407 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2409
2410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2411 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2412 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2413 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2414
2415 IEM_MC_PREPARE_AVX_USAGE();
2416 IEM_MC_FETCH_EFLAGS(fEFlags);
2417 IEM_MC_REF_MXCSR(pfMxcsr);
2418 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2419 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2420 pfMxcsr, pEFlags, puSrc1, puSrc2);
2421 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2422 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2423 } IEM_MC_ELSE() {
2424 IEM_MC_COMMIT_EFLAGS(fEFlags);
2425 } IEM_MC_ENDIF();
2426
2427 IEM_MC_ADVANCE_RIP_AND_FINISH();
2428 IEM_MC_END();
2429 }
2430}
2431
2432
2433/* Opcode VEX.F3.0F 0x2e - invalid */
2434/* Opcode VEX.F2.0F 0x2e - invalid */
2435
2436/**
2437 * @opcode 0x2f
2438 * @oppfx none
2439 * @opflmodify cf,pf,af,zf,sf,of
2440 * @opflclear af,sf,of
2441 */
2442FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2443{
2444 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2445 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2446 if (IEM_IS_MODRM_REG_MODE(bRm))
2447 {
2448 /*
2449 * Register, register.
2450 */
2451 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2452 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2453 IEM_MC_LOCAL(uint32_t, fEFlags);
2454 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2455 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2456 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2457 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2458 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2459 IEM_MC_PREPARE_AVX_USAGE();
2460 IEM_MC_FETCH_EFLAGS(fEFlags);
2461 IEM_MC_REF_MXCSR(pfMxcsr);
2462 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2463 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2464 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2465 pfMxcsr, pEFlags, puSrc1, puSrc2);
2466 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2467 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2468 } IEM_MC_ELSE() {
2469 IEM_MC_COMMIT_EFLAGS(fEFlags);
2470 } IEM_MC_ENDIF();
2471
2472 IEM_MC_ADVANCE_RIP_AND_FINISH();
2473 IEM_MC_END();
2474 }
2475 else
2476 {
2477 /*
2478 * Register, memory.
2479 */
2480 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2481 IEM_MC_LOCAL(uint32_t, fEFlags);
2482 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2483 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2484 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2485 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2486 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2488
2489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2490 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2491 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2492 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2493
2494 IEM_MC_PREPARE_AVX_USAGE();
2495 IEM_MC_FETCH_EFLAGS(fEFlags);
2496 IEM_MC_REF_MXCSR(pfMxcsr);
2497 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2498 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2499 pfMxcsr, pEFlags, puSrc1, puSrc2);
2500 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2501 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2502 } IEM_MC_ELSE() {
2503 IEM_MC_COMMIT_EFLAGS(fEFlags);
2504 } IEM_MC_ENDIF();
2505
2506 IEM_MC_ADVANCE_RIP_AND_FINISH();
2507 IEM_MC_END();
2508 }
2509}
2510
2511
2512/**
2513 * @opcode 0x2f
2514 * @oppfx 0x66
2515 * @opflmodify cf,pf,af,zf,sf,of
2516 * @opflclear af,sf,of
2517 */
2518FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2519{
2520 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2522 if (IEM_IS_MODRM_REG_MODE(bRm))
2523 {
2524 /*
2525 * Register, register.
2526 */
2527 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2528 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2529 IEM_MC_LOCAL(uint32_t, fEFlags);
2530 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2531 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2532 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2533 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2534 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2535 IEM_MC_PREPARE_AVX_USAGE();
2536 IEM_MC_FETCH_EFLAGS(fEFlags);
2537 IEM_MC_REF_MXCSR(pfMxcsr);
2538 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2539 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2540 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2541 pfMxcsr, pEFlags, puSrc1, puSrc2);
2542 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2543 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2544 } IEM_MC_ELSE() {
2545 IEM_MC_COMMIT_EFLAGS(fEFlags);
2546 } IEM_MC_ENDIF();
2547
2548 IEM_MC_ADVANCE_RIP_AND_FINISH();
2549 IEM_MC_END();
2550 }
2551 else
2552 {
2553 /*
2554 * Register, memory.
2555 */
2556 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
2557 IEM_MC_LOCAL(uint32_t, fEFlags);
2558 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2559 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2560 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2561 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2562 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2564
2565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2566 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2567 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2568 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2569
2570 IEM_MC_PREPARE_AVX_USAGE();
2571 IEM_MC_FETCH_EFLAGS(fEFlags);
2572 IEM_MC_REF_MXCSR(pfMxcsr);
2573 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2574 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2575 pfMxcsr, pEFlags, puSrc1, puSrc2);
2576 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2577 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2578 } IEM_MC_ELSE() {
2579 IEM_MC_COMMIT_EFLAGS(fEFlags);
2580 } IEM_MC_ENDIF();
2581
2582 IEM_MC_ADVANCE_RIP_AND_FINISH();
2583 IEM_MC_END();
2584 }
2585}
2586
2587
2588/* Opcode VEX.F3.0F 0x2f - invalid */
2589/* Opcode VEX.F2.0F 0x2f - invalid */
2590
2591/* Opcode VEX.0F 0x30 - invalid */
2592/* Opcode VEX.0F 0x31 - invalid */
2593/* Opcode VEX.0F 0x32 - invalid */
2594/* Opcode VEX.0F 0x33 - invalid */
2595/* Opcode VEX.0F 0x34 - invalid */
2596/* Opcode VEX.0F 0x35 - invalid */
2597/* Opcode VEX.0F 0x36 - invalid */
2598/* Opcode VEX.0F 0x37 - invalid */
2599/* Opcode VEX.0F 0x38 - invalid */
2600/* Opcode VEX.0F 0x39 - invalid */
2601/* Opcode VEX.0F 0x3a - invalid */
2602/* Opcode VEX.0F 0x3b - invalid */
2603/* Opcode VEX.0F 0x3c - invalid */
2604/* Opcode VEX.0F 0x3d - invalid */
2605/* Opcode VEX.0F 0x3e - invalid */
2606/* Opcode VEX.0F 0x3f - invalid */
2607/* Opcode VEX.0F 0x40 - invalid */
2608/* Opcode VEX.0F 0x41 - invalid */
2609/* Opcode VEX.0F 0x42 - invalid */
2610/* Opcode VEX.0F 0x43 - invalid */
2611/* Opcode VEX.0F 0x44 - invalid */
2612/* Opcode VEX.0F 0x45 - invalid */
2613/* Opcode VEX.0F 0x46 - invalid */
2614/* Opcode VEX.0F 0x47 - invalid */
2615/* Opcode VEX.0F 0x48 - invalid */
2616/* Opcode VEX.0F 0x49 - invalid */
2617/* Opcode VEX.0F 0x4a - invalid */
2618/* Opcode VEX.0F 0x4b - invalid */
2619/* Opcode VEX.0F 0x4c - invalid */
2620/* Opcode VEX.0F 0x4d - invalid */
2621/* Opcode VEX.0F 0x4e - invalid */
2622/* Opcode VEX.0F 0x4f - invalid */
2623
2624
2625/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2626FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2627{
2628 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2630 if (IEM_IS_MODRM_REG_MODE(bRm))
2631 {
2632 /*
2633 * Register, register.
2634 */
2635 if (pVCpu->iem.s.uVexLength == 0)
2636 {
2637 IEM_MC_BEGIN(2, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2638 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2639 IEM_MC_LOCAL(uint8_t, u8Dst);
2640 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2641 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2642 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2643 IEM_MC_PREPARE_AVX_USAGE();
2644 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2645 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2646 pu8Dst, puSrc);
2647 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2648 IEM_MC_ADVANCE_RIP_AND_FINISH();
2649 IEM_MC_END();
2650 }
2651 else
2652 {
2653 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2654 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2655 IEM_MC_LOCAL(uint8_t, u8Dst);
2656 IEM_MC_LOCAL(RTUINT256U, uSrc);
2657 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2658 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2659
2660 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2661 IEM_MC_PREPARE_AVX_USAGE();
2662 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2663 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2664 pu8Dst, puSrc);
2665 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2666 IEM_MC_ADVANCE_RIP_AND_FINISH();
2667 IEM_MC_END();
2668 }
2669 }
2670 /* No memory operand. */
2671 else
2672 IEMOP_RAISE_INVALID_OPCODE_RET();
2673}
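/*
 * The sign-bit gathering done by the vmovmskps worker, as a hypothetical
 * scalar sketch for the 128-bit case (illustrative only):
 */
//static uint8_t RefMovMskPs128(PCRTUINT128U puSrc)
//{
//    return (uint8_t)(  ((puSrc->au32[0] >> 31) & 1)   /* lane 0 sign -> bit 0 */
//                     | ((puSrc->au32[1] >> 30) & 2)   /* lane 1 sign -> bit 1 */
//                     | ((puSrc->au32[2] >> 29) & 4)   /* lane 2 sign -> bit 2 */
//                     | ((puSrc->au32[3] >> 28) & 8)); /* lane 3 sign -> bit 3 */
//}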
2674
2675
2676/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2677FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2678{
2679 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2681 if (IEM_IS_MODRM_REG_MODE(bRm))
2682 {
2683 /*
2684 * Register, register.
2685 */
2686 if (pVCpu->iem.s.uVexLength == 0)
2687 {
2688 IEM_MC_BEGIN(2, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
2689 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2690 IEM_MC_LOCAL(uint8_t, u8Dst);
2691 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2692 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2693 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2694 IEM_MC_PREPARE_AVX_USAGE();
2695 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2696 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2697 pu8Dst, puSrc);
2698 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2699 IEM_MC_ADVANCE_RIP_AND_FINISH();
2700 IEM_MC_END();
2701 }
2702 else
2703 {
2704 IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
2705 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2706 IEM_MC_LOCAL(uint8_t, u8Dst);
2707 IEM_MC_LOCAL(RTUINT256U, uSrc);
2708 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2709 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2710
2711 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2712 IEM_MC_PREPARE_AVX_USAGE();
2713 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2714 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2715 pu8Dst, puSrc);
2716 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2717 IEM_MC_ADVANCE_RIP_AND_FINISH();
2718 IEM_MC_END();
2719 }
2720 }
2721 /* No memory operand. */
2722 else
2723 IEMOP_RAISE_INVALID_OPCODE_RET();
2724}
2725
2726
2727/* Opcode VEX.F3.0F 0x50 - invalid */
2728/* Opcode VEX.F2.0F 0x50 - invalid */
2729
2730/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2731FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2732/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2733FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2734/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2735FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2736/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2737FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2738
2739/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2740FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2741/* Opcode VEX.66.0F 0x52 - invalid */
2742/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2743FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2744/* Opcode VEX.F2.0F 0x52 - invalid */
2745
2746/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2747FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2748/* Opcode VEX.66.0F 0x53 - invalid */
2749/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2750FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2751/* Opcode VEX.F2.0F 0x53 - invalid */
2752
2753
2754/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2755FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2756{
2757 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2758 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2759 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2760}
2761
2762
2763/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2764FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2765{
2766 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2767 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2768 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2769}
2770
2771
2772/* Opcode VEX.F3.0F 0x54 - invalid */
2773/* Opcode VEX.F2.0F 0x54 - invalid */
2774
2775
2776/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2777FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2778{
2779 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2780 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2781 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2782}
2783
2784
2785/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2786FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2787{
2788 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2789 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2790 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2791}
2792
2793
2794/* Opcode VEX.F3.0F 0x55 - invalid */
2795/* Opcode VEX.F2.0F 0x55 - invalid */
2796
2797/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2798FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2799{
2800 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2801 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2802 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2803}
2804
2805
2806/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2807FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2808{
2809 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2810 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2811 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2812}
2813
2814
2815/* Opcode VEX.F3.0F 0x56 - invalid */
2816/* Opcode VEX.F2.0F 0x56 - invalid */
2817
2818
2819/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2820FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2821{
2822 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2823 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2824 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2825}
2826
2827
2828/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2829FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2830{
2831 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2832 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2833 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2834}
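/*
 * Note how vandps/vandnps/vorps/vxorps and their pd forms all dispatch to
 * the integer vpand/vpandn/vpor/vpxor implementations: bitwise operations
 * are type-agnostic and only see bit patterns. Per-lane sketch (hypothetical
 * helper, needs <string.h>):
 */
//static float RefXorPsLane(float r32Val, uint32_t u32Mask)
//{
//    uint32_t u32;
//    memcpy(&u32, &r32Val, sizeof(u32)); /* reinterpret the float's bits */
//    u32 ^= u32Mask;
//    memcpy(&r32Val, &u32, sizeof(u32));
//    return r32Val;                      /* same per-lane result as vxorps */
//}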
2835
2836
2837/* Opcode VEX.F3.0F 0x57 - invalid */
2838/* Opcode VEX.F2.0F 0x57 - invalid */
2839
2840/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2841FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2842/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2843FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2844/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2845FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2846/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2847FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2848
2849/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2850FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2851/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2852FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2853/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2854FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2855/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2856FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2857
2858/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2859FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2860/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2861FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2862/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2863FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2864/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2865FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2866
2867/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2868FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2869/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2870FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2871/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2872FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2873/* Opcode VEX.F2.0F 0x5b - invalid */
2874
2875/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2876FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2877/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2878FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2879/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2880FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2881/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2882FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2883
2884/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2885FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2886/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2887FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2888/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2889FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2890/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2891FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2892
2893/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2894FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2895/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2896FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2897/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2898FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2899/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2900FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2901
2902/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2903FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2904/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2905FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2906/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2907FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2908/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2909FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2910
2911
2912/* Opcode VEX.0F 0x60 - invalid */
2913
2914
2915/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2916FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2917{
2918 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2919 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2920 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2921}
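/*
 * A hypothetical scalar model of the low-byte interleave vpunpcklbw performs
 * within each 128-bit lane (the first source supplies the even result bytes):
 */
//static void RefPunpckLbw(uint8_t abDst[16], uint8_t const abSrc1[16], uint8_t const abSrc2[16])
//{
//    for (unsigned i = 0; i < 8; i++)
//    {
//        abDst[i * 2]     = abSrc1[i];
//        abDst[i * 2 + 1] = abSrc2[i];
//    }
//}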
2922
2923
2924/* Opcode VEX.F3.0F 0x60 - invalid */
2925
2926
2927/* Opcode VEX.0F 0x61 - invalid */
2928
2929
2930/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2931FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2932{
2933 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2934 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2935 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2936}
2937
2938
2939/* Opcode VEX.F3.0F 0x61 - invalid */
2940
2941
2942/* Opcode VEX.0F 0x62 - invalid */
2943
2944/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2945FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2946{
2947 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2948 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2949 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2950}
2951
2952
2953/* Opcode VEX.F3.0F 0x62 - invalid */
2954
2955
2956
2957/* Opcode VEX.0F 0x63 - invalid */
2958
2959
2960/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2961FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2962{
2963 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2964 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2965 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2966}
2967
2968
2969/* Opcode VEX.F3.0F 0x63 - invalid */
2970
2971/* Opcode VEX.0F 0x64 - invalid */
2972
2973
2974/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2975FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2976{
2977 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2978 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2979 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2980}
2981
2982
2983/* Opcode VEX.F3.0F 0x64 - invalid */
2984
2985/* Opcode VEX.0F 0x65 - invalid */
2986
2987
2988/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2989FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2990{
2991 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2992 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
2993 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2994}
2995
2996
2997/* Opcode VEX.F3.0F 0x65 - invalid */
2998
2999/* Opcode VEX.0F 0x66 - invalid */
3000
3001
3002/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3003FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3004{
3005 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3006 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
3007 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3008}
3009
3010
3011/* Opcode VEX.F3.0F 0x66 - invalid */
3012
3013/* Opcode VEX.0F 0x67 - invalid */
3014
3015
3016 /** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3017FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3018{
3019 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3020 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3021 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3022}
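/*
 * The per-element saturation behind vpackuswb, as a hypothetical helper:
 * signed 16-bit inputs are clamped to the unsigned byte range 0..255.
 */
//static uint8_t RefSatSWordToUByte(int16_t i16)
//{
//    return i16 < 0 ? 0 : i16 > 255 ? UINT8_MAX : (uint8_t)i16;
//}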
3023
3024
3025/* Opcode VEX.F3.0F 0x67 - invalid */
3026
3027
3028///**
3029// * Common worker for SSE2 instructions on the form:
3030// * pxxxx xmm1, xmm2/mem128
3031// *
3032// * The 2nd operand is the second half of a register, which in the memory case
3033// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3034// * where it may read the full 128 bits or only the upper 64 bits.
3035// *
3036// * Exceptions type 4.
3037// */
3038//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3039//{
3040// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3041// if (IEM_IS_MODRM_REG_MODE(bRm))
3042// {
3043// /*
3044// * Register, register.
3045// */
3046// IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3047// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3048// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3049// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3050// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3051// IEM_MC_PREPARE_SSE_USAGE();
3052// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3053// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3054// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3055// IEM_MC_ADVANCE_RIP_AND_FINISH();
3056// IEM_MC_END();
3057// }
3058// else
3059// {
3060// /*
3061// * Register, memory.
3062// */
3063// IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3064// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3065// IEM_MC_LOCAL(RTUINT128U, uSrc);
3066// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3067// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3068//
3069// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3070// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3071// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3072// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3073//
3074// IEM_MC_PREPARE_SSE_USAGE();
3075// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3076// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3077//
3078// IEM_MC_ADVANCE_RIP_AND_FINISH();
3079// IEM_MC_END();
3080// }
3081// return VINF_SUCCESS;
3082//}
3083
3084
3085/* Opcode VEX.0F 0x68 - invalid */
3086
3087/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3088FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3089{
3090 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3091 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3092 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3093}
3094
3095
3096/* Opcode VEX.F3.0F 0x68 - invalid */
3097
3098
3099/* Opcode VEX.0F 0x69 - invalid */
3100
3101
3102/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3103FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3104{
3105 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3106 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3107 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3108}
3109
3110
3111/* Opcode VEX.F3.0F 0x69 - invalid */
3112
3113
3114/* Opcode VEX.0F 0x6a - invalid */
3115
3116
3117 /** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3118FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3119{
3120 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3121 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3122 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3123}
3124
3125
3126/* Opcode VEX.F3.0F 0x6a - invalid */
3127
3128
3129/* Opcode VEX.0F 0x6b - invalid */
3130
3131
3132/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3133FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3134{
3135 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3136 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3137 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3138}
3139
3140
3141/* Opcode VEX.F3.0F 0x6b - invalid */
3142
3143
3144/* Opcode VEX.0F 0x6c - invalid */
3145
3146
3147/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3148FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3149{
3150 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3151 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3152 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3153}
3154
3155
3156/* Opcode VEX.F3.0F 0x6c - invalid */
3157/* Opcode VEX.F2.0F 0x6c - invalid */
3158
3159
3160/* Opcode VEX.0F 0x6d - invalid */
3161
3162
3163 /** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3164FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3165{
3166 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3167 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3168 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3169}
3170
3171
3172/* Opcode VEX.F3.0F 0x6d - invalid */
3173
3174
3175/* Opcode VEX.0F 0x6e - invalid */
3176
3177FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3178{
3179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3180 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3181 {
3182 /**
3183 * @opcode 0x6e
3184 * @opcodesub rex.w=1
3185 * @oppfx 0x66
3186 * @opcpuid avx
3187 * @opgroup og_avx_simdint_datamov
3188 * @opxcpttype 5
3189 * @optest 64-bit / op1=1 op2=2 -> op1=2
3190 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3191 */
3192 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3193 if (IEM_IS_MODRM_REG_MODE(bRm))
3194 {
3195 /* XMM, greg64 */
3196 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
3197 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3198 IEM_MC_LOCAL(uint64_t, u64Tmp);
3199
3200 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3201 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3202
3203 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3204 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3205
3206 IEM_MC_ADVANCE_RIP_AND_FINISH();
3207 IEM_MC_END();
3208 }
3209 else
3210 {
3211 /* XMM, [mem64] */
3212 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3214 IEM_MC_LOCAL(uint64_t, u64Tmp);
3215
3216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3217 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3218 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3219 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3220
3221 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3222 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3223
3224 IEM_MC_ADVANCE_RIP_AND_FINISH();
3225 IEM_MC_END();
3226 }
3227 }
3228 else
3229 {
3230 /**
3231 * @opdone
3232 * @opcode 0x6e
3233 * @opcodesub rex.w=0
3234 * @oppfx 0x66
3235 * @opcpuid avx
3236 * @opgroup og_avx_simdint_datamov
3237 * @opxcpttype 5
3238 * @opfunction iemOp_vmovd_q_Vy_Ey
3239 * @optest op1=1 op2=2 -> op1=2
3240 * @optest op1=0 op2=-42 -> op1=-42
3241 */
3242 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3243 if (IEM_IS_MODRM_REG_MODE(bRm))
3244 {
3245 /* XMM, greg32 */
3246 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
3247 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3248 IEM_MC_LOCAL(uint32_t, u32Tmp);
3249
3250 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3251 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3252
3253 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3254 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3255
3256 IEM_MC_ADVANCE_RIP_AND_FINISH();
3257 IEM_MC_END();
3258 }
3259 else
3260 {
3261 /* XMM, [mem32] */
3262 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3264 IEM_MC_LOCAL(uint32_t, u32Tmp);
3265
3266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3267 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3268 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3269 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3270
3271 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3272 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3273
3274 IEM_MC_ADVANCE_RIP_AND_FINISH();
3275 IEM_MC_END();
3276 }
3277 }
3278}
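/*
 * Both forms above zero-extend all the way to VLMAX, i.e. bits 255:32
 * (resp. 255:64) of the destination YMM register are cleared. A sketch of
 * the dword case (hypothetical helper):
 */
//static void RefVMovD(RTUINT256U *puDst, uint32_t u32Src)
//{
//    puDst->au32[0] = u32Src;
//    for (unsigned i = 1; i < RT_ELEMENTS(puDst->au32); i++)
//        puDst->au32[i] = 0;
//}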
3279
3280
3281/* Opcode VEX.F3.0F 0x6e - invalid */
3282
3283
3284/* Opcode VEX.0F 0x6f - invalid */
3285
3286/**
3287 * @opcode 0x6f
3288 * @oppfx 0x66
3289 * @opcpuid avx
3290 * @opgroup og_avx_simdint_datamove
3291 * @opxcpttype 1
3292 * @optest op1=1 op2=2 -> op1=2
3293 * @optest op1=0 op2=-42 -> op1=-42
3294 */
3295FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3296{
3297 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3298 Assert(pVCpu->iem.s.uVexLength <= 1);
3299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3300 if (IEM_IS_MODRM_REG_MODE(bRm))
3301 {
3302 /*
3303 * Register, register.
3304 */
3305 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3306 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3307
3308 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3309 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3310 if (pVCpu->iem.s.uVexLength == 0)
3311 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3312 IEM_GET_MODRM_RM(pVCpu, bRm));
3313 else
3314 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3315 IEM_GET_MODRM_RM(pVCpu, bRm));
3316 IEM_MC_ADVANCE_RIP_AND_FINISH();
3317 IEM_MC_END();
3318 }
3319 else if (pVCpu->iem.s.uVexLength == 0)
3320 {
3321 /*
3322 * Register, memory128.
3323 */
3324 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3325 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3327
3328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3329 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3330 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3331 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3332
3333 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3334 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3335
3336 IEM_MC_ADVANCE_RIP_AND_FINISH();
3337 IEM_MC_END();
3338 }
3339 else
3340 {
3341 /*
3342 * Register, memory256.
3343 */
3344 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3345 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3347
3348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3349 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3350 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3351 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3352
3353 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3354 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3355
3356 IEM_MC_ADVANCE_RIP_AND_FINISH();
3357 IEM_MC_END();
3358 }
3359}
3360
3361/**
3362 * @opcode 0x6f
3363 * @oppfx 0xf3
3364 * @opcpuid avx
3365 * @opgroup og_avx_simdint_datamove
3366 * @opxcpttype 4UA
3367 * @optest op1=1 op2=2 -> op1=2
3368 * @optest op1=0 op2=-42 -> op1=-42
3369 */
3370FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3371{
3372 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3373 Assert(pVCpu->iem.s.uVexLength <= 1);
3374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3375 if (IEM_IS_MODRM_REG_MODE(bRm))
3376 {
3377 /*
3378 * Register, register.
3379 */
3380 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3381 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3382
3383 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3384 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3385 if (pVCpu->iem.s.uVexLength == 0)
3386 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3387 IEM_GET_MODRM_RM(pVCpu, bRm));
3388 else
3389 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3390 IEM_GET_MODRM_RM(pVCpu, bRm));
3391 IEM_MC_ADVANCE_RIP_AND_FINISH();
3392 IEM_MC_END();
3393 }
3394 else if (pVCpu->iem.s.uVexLength == 0)
3395 {
3396 /*
3397 * Register, memory128.
3398 */
3399 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3400 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3402
3403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3404 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3405 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3406 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3407
3408 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3409 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3410
3411 IEM_MC_ADVANCE_RIP_AND_FINISH();
3412 IEM_MC_END();
3413 }
3414 else
3415 {
3416 /*
3417 * Register, memory256.
3418 */
3419 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3420 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3422
3423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3424 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3425 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3426 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3427
3428 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3429 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3430
3431 IEM_MC_ADVANCE_RIP_AND_FINISH();
3432 IEM_MC_END();
3433 }
3434}
3435
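/*
 * Note: the _ZX_VLMAX stores/copies used throughout implement the VEX rule
 * that a write to an XMM/YMM register zeroes the register all the way up to
 * VLMAX.  A sketch of IEM_MC_STORE_YREG_U128_ZX_VLMAX assuming a 256-bit
 * VLMAX (illustrative only, pYReg is a made-up name):
 *
 * @code
 *      pYReg->au64[0] = u128Tmp.au64[0];
 *      pYReg->au64[1] = u128Tmp.au64[1];
 *      pYReg->au64[2] = 0;
 *      pYReg->au64[3] = 0;
 * @endcode
 */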
3436
3437/* Opcode VEX.0F 0x70 - invalid */
3438
3439
3440/**
3441 * Common worker for AVX/AVX2 instructions on the forms:
3442 * - vpxxx xmm0, xmm2/mem128, imm8
3443 * - vpxxx ymm0, ymm2/mem256, imm8
3444 *
3445 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3446 */
3447FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3448{
3449 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3450 if (IEM_IS_MODRM_REG_MODE(bRm))
3451 {
3452 /*
3453 * Register, register.
3454 */
3455 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3456 if (pVCpu->iem.s.uVexLength)
3457 {
3458 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3459 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3460 IEM_MC_LOCAL(RTUINT256U, uDst);
3461 IEM_MC_LOCAL(RTUINT256U, uSrc);
3462 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3463 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3464 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3465 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3466 IEM_MC_PREPARE_AVX_USAGE();
3467 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3468 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3469 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3470 IEM_MC_ADVANCE_RIP_AND_FINISH();
3471 IEM_MC_END();
3472 }
3473 else
3474 {
3475 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3476 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3477 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3478 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3479 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3480 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3481 IEM_MC_PREPARE_AVX_USAGE();
3482 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3483 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3484 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3485 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3486 IEM_MC_ADVANCE_RIP_AND_FINISH();
3487 IEM_MC_END();
3488 }
3489 }
3490 else
3491 {
3492 /*
3493 * Register, memory.
3494 */
3495 if (pVCpu->iem.s.uVexLength)
3496 {
3497 IEM_MC_BEGIN(3, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
3498 IEM_MC_LOCAL(RTUINT256U, uDst);
3499 IEM_MC_LOCAL(RTUINT256U, uSrc);
3500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3501 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3502 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3503
3504 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3505 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3506 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3507 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3508 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3509 IEM_MC_PREPARE_AVX_USAGE();
3510
3511 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3512 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3513 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3514
3515 IEM_MC_ADVANCE_RIP_AND_FINISH();
3516 IEM_MC_END();
3517 }
3518 else
3519 {
3520 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
3521 IEM_MC_LOCAL(RTUINT128U, uSrc);
3522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3523 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3524 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3525
3526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3527 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3528 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3529 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3530 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3531 IEM_MC_PREPARE_AVX_USAGE();
3532
3533 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3534 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3535 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3536 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3537
3538 IEM_MC_ADVANCE_RIP_AND_FINISH();
3539 IEM_MC_END();
3540 }
3541 }
3542}
3543
3544
3545/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3546FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3547{
3548 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3549 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3550 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3552}
3553
3554
3555/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3556FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3557{
3558 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3559 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3560 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3562}
3563
3564
3565/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3566FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3567{
3568 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3569 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3570 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3571}
3572
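/*
 * Note: the worker above only does the decoding; the pfnU128/pfnU256 bodies
 * live in the iemAImpl_* files.  As a quick reference, the 128-bit vpshufd
 * selection amounts to this sketch (illustrative only, names as above):
 *
 * @code
 *      RTUINT128U const uTmp = *puSrc;  // copy first, puDst may alias puSrc
 *      puDst->au32[0] = uTmp.au32[ bImm       & 3];
 *      puDst->au32[1] = uTmp.au32[(bImm >> 2) & 3];
 *      puDst->au32[2] = uTmp.au32[(bImm >> 4) & 3];
 *      puDst->au32[3] = uTmp.au32[(bImm >> 6) & 3];
 * @endcode
 *
 * vpshuflw applies the same 2-bit selectors to the low four words and copies
 * the high quadword through; vpshufhw is the mirror image of that.
 */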
3573
3574/**
3575 * Common worker(s) for AVX/AVX2 instructions on the forms:
3576 * - vpxxx xmm0, xmm2, imm8
3577 * - vpxxx ymm0, ymm2, imm8
3578 *
3579 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3580 */
3581FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
3582{
3583 if (IEM_IS_MODRM_REG_MODE(bRm))
3584 {
3585 /*
3586 * Register, register.
3587 */
3588 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3589 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3590 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3591 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3592 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3593 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3594 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3595 IEM_MC_PREPARE_AVX_USAGE();
3596 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3597 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3598 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3599 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
3600 IEM_MC_ADVANCE_RIP_AND_FINISH();
3601 IEM_MC_END();
3602 }
3603 /* No memory operand. */
3604 else
3605 IEMOP_RAISE_INVALID_OPCODE_RET();
3606}
3607
3608FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3609{
3610 if (IEM_IS_MODRM_REG_MODE(bRm))
3611 {
3612 /*
3613 * Register, register.
3614 */
3615 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3616 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
3617 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3618 IEM_MC_LOCAL(RTUINT256U, uDst);
3619 IEM_MC_LOCAL(RTUINT256U, uSrc);
3620 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3621 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3622 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3623 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3624 IEM_MC_PREPARE_AVX_USAGE();
3625 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3626 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3627 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
3628 IEM_MC_ADVANCE_RIP_AND_FINISH();
3629 IEM_MC_END();
3630 }
3631 /* No memory operand. */
3632 else
3633 IEMOP_RAISE_INVALID_OPCODE_RET();
3634}
3635
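/*
 * Note: the pfnU128/pfnU256 implementations invoked by the workers above (the
 * vpsrlw/vpsraw/vpsllw and friends below) reduce to a simple per-element
 * shift; sketch of the 16-bit vpsrlw case (illustrative only, counts above
 * the element width zero the element):
 *
 * @code
 *      for (unsigned i = 0; i < RT_ELEMENTS(puDst->au16); i++)
 *          puDst->au16[i] = bImm <= 15 ? puSrc->au16[i] >> bImm : 0;
 * @endcode
 *
 * The arithmetic variant (vpsraw) clamps the count to 15 instead, so large
 * counts fill each element with its sign bit.
 */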
3636
3637/* Opcode VEX.0F 0x71 11/2 - invalid. */
3638/** Opcode VEX.66.0F 0x71 11/2. */
3639FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
3640{
3641 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3642 if (pVCpu->iem.s.uVexLength)
3643 {
3644 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3645 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
3646 }
3647 else
3648 {
3649 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3650 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
3651 }
3652}
3653
3654
3655/* Opcode VEX.0F 0x71 11/4 - invalid */
3656/** Opcode VEX.66.0F 0x71 11/4. */
3657FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
3658{
3659 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3660 if (pVCpu->iem.s.uVexLength)
3661 {
3662 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3663 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
3664 }
3665 else
3666 {
3667 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3668 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
3669 }
3670}
3671
3672/* Opcode VEX.0F 0x71 11/6 - invalid */
3673
3674/** Opcode VEX.66.0F 0x71 11/6. */
3675FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
3676{
3677 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3678 if (pVCpu->iem.s.uVexLength)
3679 {
3680 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3681 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
3682 }
3683 else
3684 {
3685 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3686 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
3687 }
3688}
3689
3690
3691/**
3692 * VEX Group 12 jump table for register variant.
3693 */
3694IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3695{
3696 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3697 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3698 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3699 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3700 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3701 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3702 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3703 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3704};
3705AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3706
3707
3708/** Opcode VEX.0F 0x71. */
3709FNIEMOP_DEF(iemOp_VGrp12)
3710{
3711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3712 if (IEM_IS_MODRM_REG_MODE(bRm))
3713 /* register, register */
3714 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3715 + pVCpu->iem.s.idxPrefix], bRm);
3716 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3717}
3718
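/*
 * Note: the dispatch above indexes the table as IEM_GET_MODRM_REG_8(bRm) * 4
 * + pVCpu->iem.s.idxPrefix, i.e. four prefix columns (none, 066h, 0f3h, 0f2h)
 * per ModRM.reg row.  E.g. VEX.66.0F 71 /2 ib (vpsrlw) has reg=2 and prefix
 * index 1 and thus lands on entry 2*4 + 1 = iemOp_VGrp12_vpsrlw_Hx_Ux_Ib.
 * Groups 13 and 14 below use the same scheme.
 */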
3719
3720/* Opcode VEX.0F 0x72 11/2 - invalid. */
3721/** Opcode VEX.66.0F 0x72 11/2. */
3722FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
3723{
3724 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3725 if (pVCpu->iem.s.uVexLength)
3726 {
3727 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3728 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
3729 }
3730 else
3731 {
3732 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3733 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
3734 }
3735}
3736
3737
3738/* Opcode VEX.0F 0x72 11/4 - invalid. */
3739/** Opcode VEX.66.0F 0x72 11/4. */
3740FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
3741{
3742 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3743 if (pVCpu->iem.s.uVexLength)
3744 {
3745 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3746 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
3747 }
3748 else
3749 {
3750 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3751 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
3752 }
3753}
3754
3755/* Opcode VEX.0F 0x72 11/6 - invalid. */
3756
3757/** Opcode VEX.66.0F 0x72 11/6. */
3758FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
3759{
3760 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3761 if (pVCpu->iem.s.uVexLength)
3762 {
3763 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3764 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
3765 }
3766 else
3767 {
3768 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3769 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
3770 }
3771}
3772
3773
3774/**
3775 * VEX Group 13 jump table for register variant.
3776 */
3777IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3778{
3779 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3780 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3781 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3782 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3783 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3784 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3785 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3786 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3787};
3788AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3789
3790/** Opcode VEX.0F 0x72. */
3791FNIEMOP_DEF(iemOp_VGrp13)
3792{
3793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3794 if (IEM_IS_MODRM_REG_MODE(bRm))
3795 /* register, register */
3796 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3797 + pVCpu->iem.s.idxPrefix], bRm);
3798 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3799}
3800
3801
3802/* Opcode VEX.0F 0x73 11/2 - invalid. */
3803/** Opcode VEX.66.0F 0x73 11/2. */
3804FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
3805{
3806 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3807 if (pVCpu->iem.s.uVexLength)
3808 {
3809 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3810 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
3811 }
3812 else
3813 {
3814 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3815 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
3816 }
3817}
3818
3819
3820/** Opcode VEX.66.0F 0x73 11/3. */
3821FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3822
3823/* Opcode VEX.0F 0x73 11/6 - invalid. */
3824
3825/** Opcode VEX.66.0F 0x73 11/6. */
3826FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
3827{
3828 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3829 if (pVCpu->iem.s.uVexLength)
3830 {
3831 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3832 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
3833 }
3834 else
3835 {
3836 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3837 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
3838 }
3839}
3840
3841/** Opcode VEX.66.0F 0x73 11/7. */
3842FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3843
3844/**
3845 * VEX Group 14 jump table for register variant.
3846 */
3847IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3848{
3849 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3850 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3851 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3852 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3853 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3854 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3855 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3856 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3857};
3858AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3859
3860
3861/** Opcode VEX.0F 0x73. */
3862FNIEMOP_DEF(iemOp_VGrp14)
3863{
3864 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3865 if (IEM_IS_MODRM_REG_MODE(bRm))
3866 /* register, register */
3867 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3868 + pVCpu->iem.s.idxPrefix], bRm);
3869 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3870}
3871
3872
3873/* Opcode VEX.0F 0x74 - invalid */
3874
3875
3876/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3877FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3878{
3879 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3880 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
3881 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3882}
3883
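/*
 * Note: the vpcmpeq* instructions here produce all-ones / all-zero element
 * masks rather than flags; per-byte sketch for vpcmpeqb (illustrative only):
 *
 * @code
 *      for (unsigned i = 0; i < RT_ELEMENTS(puDst->au8); i++)
 *          puDst->au8[i] = puSrc1->au8[i] == puSrc2->au8[i] ? 0xff : 0x00;
 * @endcode
 */
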
3884/* Opcode VEX.F3.0F 0x74 - invalid */
3885/* Opcode VEX.F2.0F 0x74 - invalid */
3886
3887
3888/* Opcode VEX.0F 0x75 - invalid */
3889
3890
3891/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3892FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3893{
3894 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3895 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3896 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3897}
3898
3899
3900/* Opcode VEX.F3.0F 0x75 - invalid */
3901/* Opcode VEX.F2.0F 0x75 - invalid */
3902
3903
3904/* Opcode VEX.0F 0x76 - invalid */
3905
3906
3907/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3908FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3909{
3910 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3911 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3912 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3913}
3914
3915
3916/* Opcode VEX.F3.0F 0x76 - invalid */
3917/* Opcode VEX.F2.0F 0x76 - invalid */
3918
3919
3920/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
3921FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
3922{
3923 Assert(pVCpu->iem.s.uVexLength <= 1);
3924 if (pVCpu->iem.s.uVexLength == 0)
3925 {
3926 /*
3927 * 128-bit: vzeroupper
3928 */
3929 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
3930 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
3931
3932 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3933 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3934 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3935
3936 IEM_MC_CLEAR_YREG_128_UP(0);
3937 IEM_MC_CLEAR_YREG_128_UP(1);
3938 IEM_MC_CLEAR_YREG_128_UP(2);
3939 IEM_MC_CLEAR_YREG_128_UP(3);
3940 IEM_MC_CLEAR_YREG_128_UP(4);
3941 IEM_MC_CLEAR_YREG_128_UP(5);
3942 IEM_MC_CLEAR_YREG_128_UP(6);
3943 IEM_MC_CLEAR_YREG_128_UP(7);
3944
3945 if (IEM_IS_64BIT_CODE(pVCpu)) /* Only 64-bit code has YMM8 thru YMM15. */
3946 {
3947 IEM_MC_CLEAR_YREG_128_UP( 8);
3948 IEM_MC_CLEAR_YREG_128_UP( 9);
3949 IEM_MC_CLEAR_YREG_128_UP(10);
3950 IEM_MC_CLEAR_YREG_128_UP(11);
3951 IEM_MC_CLEAR_YREG_128_UP(12);
3952 IEM_MC_CLEAR_YREG_128_UP(13);
3953 IEM_MC_CLEAR_YREG_128_UP(14);
3954 IEM_MC_CLEAR_YREG_128_UP(15);
3955 }
3956
3957 IEM_MC_ADVANCE_RIP_AND_FINISH();
3958 IEM_MC_END();
3959 }
3960 else
3961 {
3962 /*
3963 * 256-bit: vzeroall
3964 */
3965 IEMOP_MNEMONIC(vzeroall, "vzeroall");
3966 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
3967
3968 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3969 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3970 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3971
3972 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
3973 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
3974 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
3975 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
3976 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
3977 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
3978 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
3979 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
3980 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
3981
3982 if (IEM_IS_64BIT_CODE(pVCpu)) /* Only 64-bit code has YMM8 thru YMM15. */
3983 {
3984 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
3985 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
3986 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
3987 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
3988 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
3989 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
3990 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
3991 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
3992 }
3993
3994 IEM_MC_ADVANCE_RIP_AND_FINISH();
3995 IEM_MC_END();
3996 }
3997}
3998
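/*
 * Note: in plain C both paths above boil down to this sketch (pCtx->ymm is a
 * made-up name for the guest register file; cRegs is 16 for 64-bit code and
 * 8 otherwise):
 *
 * @code
 *      for (unsigned iReg = 0; iReg < cRegs; iReg++)
 *      {
 *          pCtx->ymm[iReg].au64[2] = 0;    // vzeroupper: bits 255:128 only
 *          pCtx->ymm[iReg].au64[3] = 0;
 *          // vzeroall clears bits 127:0 as well:
 *          //pCtx->ymm[iReg].au64[0] = pCtx->ymm[iReg].au64[1] = 0;
 *      }
 * @endcode
 */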
3999
4000/* Opcode VEX.66.0F 0x77 - invalid */
4001/* Opcode VEX.F3.0F 0x77 - invalid */
4002/* Opcode VEX.F2.0F 0x77 - invalid */
4003
4004/* Opcode VEX.0F 0x78 - invalid */
4005/* Opcode VEX.66.0F 0x78 - invalid */
4006/* Opcode VEX.F3.0F 0x78 - invalid */
4007/* Opcode VEX.F2.0F 0x78 - invalid */
4008
4009/* Opcode VEX.0F 0x79 - invalid */
4010/* Opcode VEX.66.0F 0x79 - invalid */
4011/* Opcode VEX.F3.0F 0x79 - invalid */
4012/* Opcode VEX.F2.0F 0x79 - invalid */
4013
4014/* Opcode VEX.0F 0x7a - invalid */
4015/* Opcode VEX.66.0F 0x7a - invalid */
4016/* Opcode VEX.F3.0F 0x7a - invalid */
4017/* Opcode VEX.F2.0F 0x7a - invalid */
4018
4019/* Opcode VEX.0F 0x7b - invalid */
4020/* Opcode VEX.66.0F 0x7b - invalid */
4021/* Opcode VEX.F3.0F 0x7b - invalid */
4022/* Opcode VEX.F2.0F 0x7b - invalid */
4023
4024/* Opcode VEX.0F 0x7c - invalid */
4025/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4026FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
4027/* Opcode VEX.F3.0F 0x7c - invalid */
4028/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4029FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
4030
4031/* Opcode VEX.0F 0x7d - invalid */
4032/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4033FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
4034/* Opcode VEX.F3.0F 0x7d - invalid */
4035/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4036FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
4037
4038
4039/* Opcode VEX.0F 0x7e - invalid */
4040
4041FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4042{
4043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4044 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4045 {
4046 /**
4047 * @opcode 0x7e
4048 * @opcodesub rex.w=1
4049 * @oppfx 0x66
4050 * @opcpuid avx
4051 * @opgroup og_avx_simdint_datamove
4052 * @opxcpttype 5
4053 * @optest 64-bit / op1=1 op2=2 -> op1=2
4054 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4055 */
4056 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4057 if (IEM_IS_MODRM_REG_MODE(bRm))
4058 {
4059 /* greg64, XMM */
4060 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
4061 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4062 IEM_MC_LOCAL(uint64_t, u64Tmp);
4063
4064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4065 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4066
4067 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4068 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4069
4070 IEM_MC_ADVANCE_RIP_AND_FINISH();
4071 IEM_MC_END();
4072 }
4073 else
4074 {
4075 /* [mem64], XMM */
4076 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
4077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4078 IEM_MC_LOCAL(uint64_t, u64Tmp);
4079
4080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4081 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4082 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4083 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4084
4085 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4086 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4087
4088 IEM_MC_ADVANCE_RIP_AND_FINISH();
4089 IEM_MC_END();
4090 }
4091 }
4092 else
4093 {
4094 /**
4095 * @opdone
4096 * @opcode 0x7e
4097 * @opcodesub rex.w=0
4098 * @oppfx 0x66
4099 * @opcpuid avx
4100 * @opgroup og_avx_simdint_datamove
4101 * @opxcpttype 5
4102 * @opfunction iemOp_vmovd_q_Ey_Vy
4103 * @optest op1=1 op2=2 -> op1=2
4104 * @optest op1=0 op2=-42 -> op1=-42
4105 */
4106 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4107 if (IEM_IS_MODRM_REG_MODE(bRm))
4108 {
4109 /* greg32, XMM */
4110 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
4111 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4112 IEM_MC_LOCAL(uint32_t, u32Tmp);
4113
4114 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4115 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4116
4117 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4118 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4119
4120 IEM_MC_ADVANCE_RIP_AND_FINISH();
4121 IEM_MC_END();
4122 }
4123 else
4124 {
4125 /* [mem32], XMM */
4126 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4128 IEM_MC_LOCAL(uint32_t, u32Tmp);
4129
4130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4131 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4132 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4133 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4134
4135 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4136 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4137
4138 IEM_MC_ADVANCE_RIP_AND_FINISH();
4139 IEM_MC_END();
4140 }
4141 }
4142}
4143
4144
4145/**
4146 * @opcode 0x7e
4147 * @oppfx 0xf3
4148 * @opcpuid avx
4149 * @opgroup og_avx_pcksclr_datamove
4150 * @opxcpttype none
4151 * @optest op1=1 op2=2 -> op1=2
4152 * @optest op1=0 op2=-42 -> op1=-42
4153 */
4154FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4155{
4156 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4157 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4158 if (IEM_IS_MODRM_REG_MODE(bRm))
4159 {
4160 /*
4161 * Register, register.
4162 */
4163 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4164 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4165
4166 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4167 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4168
4169 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4170 IEM_GET_MODRM_RM(pVCpu, bRm));
4171 IEM_MC_ADVANCE_RIP_AND_FINISH();
4172 IEM_MC_END();
4173 }
4174 else
4175 {
4176 /*
4177 * Memory, register.
4178 */
4179 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4180 IEM_MC_LOCAL(uint64_t, uSrc);
4181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4182
4183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4184 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4185 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4186 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4187
4188 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4189 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4190
4191 IEM_MC_ADVANCE_RIP_AND_FINISH();
4192 IEM_MC_END();
4193 }
4195}
4196/* Opcode VEX.F2.0F 0x7e - invalid */
4197
4198
4199/* Opcode VEX.0F 0x7f - invalid */
4200
4201/**
4202 * @opcode 0x7f
4203 * @oppfx 0x66
4204 * @opcpuid avx
4205 * @opgroup og_avx_simdint_datamove
4206 * @opxcpttype 1
4207 * @optest op1=1 op2=2 -> op1=2
4208 * @optest op1=0 op2=-42 -> op1=-42
4209 */
4210FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4211{
4212 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4213 Assert(pVCpu->iem.s.uVexLength <= 1);
4214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4215 if (IEM_IS_MODRM_REG_MODE(bRm))
4216 {
4217 /*
4218 * Register, register.
4219 */
4220 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4221 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4222
4223 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4224 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4225 if (pVCpu->iem.s.uVexLength == 0)
4226 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4227 IEM_GET_MODRM_REG(pVCpu, bRm));
4228 else
4229 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4230 IEM_GET_MODRM_REG(pVCpu, bRm));
4231 IEM_MC_ADVANCE_RIP_AND_FINISH();
4232 IEM_MC_END();
4233 }
4234 else if (pVCpu->iem.s.uVexLength == 0)
4235 {
4236 /*
4237 * Register, memory128.
4238 */
4239 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4240 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4242
4243 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4244 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4245 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4246 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4247
4248 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4249 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4250
4251 IEM_MC_ADVANCE_RIP_AND_FINISH();
4252 IEM_MC_END();
4253 }
4254 else
4255 {
4256 /*
4257 * Register, memory256.
4258 */
4259 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4260 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4262
4263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4264 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4265 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4266 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4267
4268 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4269 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4270
4271 IEM_MC_ADVANCE_RIP_AND_FINISH();
4272 IEM_MC_END();
4273 }
4274}
4275
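/*
 * Note: unlike the vmovdqu form further down, the vmovdqa paths above use the
 * _ALIGN_SSE / _ALIGN_AVX memory accessors, which raise \#GP(0) on misaligned
 * 16/32 byte operands, whereas the _NO_AC accessors tolerate any alignment.
 */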
4276
4277/**
4278 * @opcode 0x7f
4279 * @oppfx 0xf3
4280 * @opcpuid avx
4281 * @opgroup og_avx_simdint_datamove
4282 * @opxcpttype 4UA
4283 * @optest op1=1 op2=2 -> op1=2
4284 * @optest op1=0 op2=-42 -> op1=-42
4285 */
4286FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4287{
4288 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4289 Assert(pVCpu->iem.s.uVexLength <= 1);
4290 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4291 if (IEM_IS_MODRM_REG_MODE(bRm))
4292 {
4293 /*
4294 * Register, register.
4295 */
4296 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4297 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4298
4299 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4300 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4301 if (pVCpu->iem.s.uVexLength == 0)
4302 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4303 IEM_GET_MODRM_REG(pVCpu, bRm));
4304 else
4305 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4306 IEM_GET_MODRM_REG(pVCpu, bRm));
4307 IEM_MC_ADVANCE_RIP_AND_FINISH();
4308 IEM_MC_END();
4309 }
4310 else if (pVCpu->iem.s.uVexLength == 0)
4311 {
4312 /*
4313 * Register, memory128.
4314 */
4315 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4316 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4318
4319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4320 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4321 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4322 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4323
4324 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4325 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4326
4327 IEM_MC_ADVANCE_RIP_AND_FINISH();
4328 IEM_MC_END();
4329 }
4330 else
4331 {
4332 /*
4333 * Register, memory256.
4334 */
4335 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4336 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4338
4339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4340 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4341 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4342 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4343
4344 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4345 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4346
4347 IEM_MC_ADVANCE_RIP_AND_FINISH();
4348 IEM_MC_END();
4349 }
4350}
4351
4352/* Opcode VEX.F2.0F 0x7f - invalid */
4353
4354
4355/* Opcode VEX.0F 0x80 - invalid */
4356/* Opcode VEX.0F 0x81 - invalid */
4357/* Opcode VEX.0F 0x82 - invalid */
4358/* Opcode VEX.0F 0x83 - invalid */
4359/* Opcode VEX.0F 0x84 - invalid */
4360/* Opcode VEX.0F 0x85 - invalid */
4361/* Opcode VEX.0F 0x86 - invalid */
4362/* Opcode VEX.0F 0x87 - invalid */
4363/* Opcode VEX.0F 0x88 - invalid */
4364/* Opcode VEX.0F 0x89 - invalid */
4365/* Opcode VEX.0F 0x8a - invalid */
4366/* Opcode VEX.0F 0x8b - invalid */
4367/* Opcode VEX.0F 0x8c - invalid */
4368/* Opcode VEX.0F 0x8d - invalid */
4369/* Opcode VEX.0F 0x8e - invalid */
4370/* Opcode VEX.0F 0x8f - invalid */
4371/* Opcode VEX.0F 0x90 - invalid */
4372/* Opcode VEX.0F 0x91 - invalid */
4373/* Opcode VEX.0F 0x92 - invalid */
4374/* Opcode VEX.0F 0x93 - invalid */
4375/* Opcode VEX.0F 0x94 - invalid */
4376/* Opcode VEX.0F 0x95 - invalid */
4377/* Opcode VEX.0F 0x96 - invalid */
4378/* Opcode VEX.0F 0x97 - invalid */
4379/* Opcode VEX.0F 0x98 - invalid */
4380/* Opcode VEX.0F 0x99 - invalid */
4381/* Opcode VEX.0F 0x9a - invalid */
4382/* Opcode VEX.0F 0x9b - invalid */
4383/* Opcode VEX.0F 0x9c - invalid */
4384/* Opcode VEX.0F 0x9d - invalid */
4385/* Opcode VEX.0F 0x9e - invalid */
4386/* Opcode VEX.0F 0x9f - invalid */
4387/* Opcode VEX.0F 0xa0 - invalid */
4388/* Opcode VEX.0F 0xa1 - invalid */
4389/* Opcode VEX.0F 0xa2 - invalid */
4390/* Opcode VEX.0F 0xa3 - invalid */
4391/* Opcode VEX.0F 0xa4 - invalid */
4392/* Opcode VEX.0F 0xa5 - invalid */
4393/* Opcode VEX.0F 0xa6 - invalid */
4394/* Opcode VEX.0F 0xa7 - invalid */
4395/* Opcode VEX.0F 0xa8 - invalid */
4396/* Opcode VEX.0F 0xa9 - invalid */
4397/* Opcode VEX.0F 0xaa - invalid */
4398/* Opcode VEX.0F 0xab - invalid */
4399/* Opcode VEX.0F 0xac - invalid */
4400/* Opcode VEX.0F 0xad - invalid */
4401
4402
4403/* Opcode VEX.0F 0xae mem/0 - invalid. */
4404/* Opcode VEX.0F 0xae mem/1 - invalid. */
4405
4406/**
4407 * @ opmaps grp15
4408 * @ opcode !11/2
4409 * @ oppfx none
4410 * @ opcpuid sse
4411 * @ opgroup og_sse_mxcsrsm
4412 * @ opxcpttype 5
4413 * @ optest op1=0 -> mxcsr=0
4414 * @ optest op1=0x2083 -> mxcsr=0x2083
4415 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4416 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4417 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4418 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4419 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4420 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4421 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4422 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4423 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4424 */
4425FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4426//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4427//{
4428// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4429// IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4430// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4431// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4432// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4433// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4434// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4435// IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4436// IEM_MC_END();
4438//}
4439
4440
4441/**
4442 * @opmaps vexgrp15
4443 * @opcode !11/3
4444 * @oppfx none
4445 * @opcpuid avx
4446 * @opgroup og_avx_mxcsrsm
4447 * @opxcpttype 5
4448 * @optest mxcsr=0 -> op1=0
4449 * @optest mxcsr=0x2083 -> op1=0x2083
4450 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4451 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4452 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4453 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4454 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4455 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4456 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4457 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4458 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4459 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4460 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4461 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4462 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4463 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4464 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4465 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4466 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4467 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4468 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4469 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4470 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4471 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4472 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4473 * -> value.xcpt=0x6
4474 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4475 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4476 * APMv4 rev 3.17 page 509.
4477 * @todo Test this instruction on AMD Ryzen.
4478 */
4479FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4480{
4481 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4482 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4483 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4485 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4486 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4487 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4488 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4489 IEM_MC_END();
4490}
4491
4492/* Opcode VEX.0F 0xae mem/4 - invalid. */
4493/* Opcode VEX.0F 0xae mem/5 - invalid. */
4494/* Opcode VEX.0F 0xae mem/6 - invalid. */
4495/* Opcode VEX.0F 0xae mem/7 - invalid. */
4496
4497/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4498/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4499/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4500/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4501/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4502/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4503/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4504/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4505
4506/**
4507 * Vex group 15 jump table for memory variant.
4508 */
4509IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4510{ /* pfx: none, 066h, 0f3h, 0f2h */
4511 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4512 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4513 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4514 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4515 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4516 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4517 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4518 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4519};
4520AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4521
4522
4523/** Opcode VEX.0F 0xae. */
4524FNIEMOP_DEF(iemOp_VGrp15)
4525{
4526 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4527 if (IEM_IS_MODRM_REG_MODE(bRm))
4528 /* register, register */
4529 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4530
4531 /* memory, register */
4532 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4533 + pVCpu->iem.s.idxPrefix], bRm);
4534}
4535
4536
4537/* Opcode VEX.0F 0xaf - invalid. */
4538
4539/* Opcode VEX.0F 0xb0 - invalid. */
4540/* Opcode VEX.0F 0xb1 - invalid. */
4541/* Opcode VEX.0F 0xb2 - invalid. */
4543/* Opcode VEX.0F 0xb3 - invalid. */
4544/* Opcode VEX.0F 0xb4 - invalid. */
4545/* Opcode VEX.0F 0xb5 - invalid. */
4546/* Opcode VEX.0F 0xb6 - invalid. */
4547/* Opcode VEX.0F 0xb7 - invalid. */
4548/* Opcode VEX.0F 0xb8 - invalid. */
4549/* Opcode VEX.0F 0xb9 - invalid. */
4550/* Opcode VEX.0F 0xba - invalid. */
4551/* Opcode VEX.0F 0xbb - invalid. */
4552/* Opcode VEX.0F 0xbc - invalid. */
4553/* Opcode VEX.0F 0xbd - invalid. */
4554/* Opcode VEX.0F 0xbe - invalid. */
4555/* Opcode VEX.0F 0xbf - invalid. */
4556
4557/* Opcode VEX.0F 0xc0 - invalid. */
4558/* Opcode VEX.66.0F 0xc0 - invalid. */
4559/* Opcode VEX.F3.0F 0xc0 - invalid. */
4560/* Opcode VEX.F2.0F 0xc0 - invalid. */
4561
4562/* Opcode VEX.0F 0xc1 - invalid. */
4563/* Opcode VEX.66.0F 0xc1 - invalid. */
4564/* Opcode VEX.F3.0F 0xc1 - invalid. */
4565/* Opcode VEX.F2.0F 0xc1 - invalid. */
4566
4567/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4568FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4569/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4570FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4571/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4572FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4573/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4574FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4575
4576/* Opcode VEX.0F 0xc3 - invalid */
4577/* Opcode VEX.66.0F 0xc3 - invalid */
4578/* Opcode VEX.F3.0F 0xc3 - invalid */
4579/* Opcode VEX.F2.0F 0xc3 - invalid */
4580
4581/* Opcode VEX.0F 0xc4 - invalid */
4582
4583
4584/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4585FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4586{
4587 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4589 if (IEM_IS_MODRM_REG_MODE(bRm))
4590 {
4591 /*
4592 * Register, register.
4593 */
4594 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4595 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4596 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4597 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4598 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4599 IEM_MC_ARG(uint16_t, u16Src, 2);
4600 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4601 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4602 IEM_MC_PREPARE_AVX_USAGE();
4603 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4604 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4605 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4606 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4607 puDst, puSrc, u16Src, bImmArg);
4608 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4609 IEM_MC_ADVANCE_RIP_AND_FINISH();
4610 IEM_MC_END();
4611 }
4612 else
4613 {
4614 /*
4615 * Register, memory.
4616 */
4617 IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
4618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4619 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4620 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4621 IEM_MC_ARG(uint16_t, u16Src, 2);
4622
4623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4624 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4625 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4626 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4627 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4628 IEM_MC_PREPARE_AVX_USAGE();
4629
4630 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4631 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4632 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4633 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4634 puDst, puSrc, u16Src, bImmArg);
4635 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4636
4637 IEM_MC_ADVANCE_RIP_AND_FINISH();
4638 IEM_MC_END();
4639 }
4640}
4641
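/*
 * Note: the vpinsrw operation itself is just a copy plus a single word
 * replacement; sketch (illustrative only, names as in the code above):
 *
 * @code
 *      *puDst = *puSrc;                // VEX form copies the first source
 *      puDst->au16[bImm & 7] = u16Src; // then inserts the word
 * @endcode
 */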
4642
4643/* Opcode VEX.F3.0F 0xc4 - invalid */
4644/* Opcode VEX.F2.0F 0xc4 - invalid */
4645
4646/* Opcode VEX.0F 0xc5 - invalid */
4647
4648
4649/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4650FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4651{
4652 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4653 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4654 if (IEM_IS_MODRM_REG_MODE(bRm))
4655 {
4656 /*
4657 * Register, register.
4658 */
4659 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4660 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
4661 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4662 IEM_MC_LOCAL(uint16_t, u16Dst);
4663 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
4664 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4665 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4666 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4667 IEM_MC_PREPARE_AVX_USAGE();
4668 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4669 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
4670 pu16Dst, puSrc, bImmArg);
4671 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
4672 IEM_MC_ADVANCE_RIP_AND_FINISH();
4673 IEM_MC_END();
4674 }
4675 /* No memory operand. */
4676 else
4677 IEMOP_RAISE_INVALID_OPCODE_RET();
4678}
4679
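/*
 * Note: vpextrw is the inverse pick; sketch (illustrative only):
 *
 * @code
 *      *pu16Dst = puSrc->au16[bImm & 7]; // stored zero extended to 32 bits
 * @endcode
 */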
4680
4681/* Opcode VEX.F3.0F 0xc5 - invalid */
4682/* Opcode VEX.F2.0F 0xc5 - invalid */
4683
4684
4685#define VSHUFP_X(a_Instr) \
4686 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4687 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4688 { \
4689 /* \
4690 * Register, register. \
4691 */ \
4692 if (pVCpu->iem.s.uVexLength) \
4693 { \
4694 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4695 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0); \
4696 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4697 IEM_MC_LOCAL(RTUINT256U, uDst); \
4698 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4699 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4700 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4701 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4702 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4703 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4704 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4705 IEM_MC_PREPARE_AVX_USAGE(); \
4706 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4707 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4708 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4709 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4710 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4711 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4712 IEM_MC_END(); \
4713 } \
4714 else \
4715 { \
4716 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4717 IEM_MC_BEGIN(4, 0, IEM_MC_F_NOT_286_OR_OLDER, 0); \
4718 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4719 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4720 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4721 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4722 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4723 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4724 IEM_MC_PREPARE_AVX_USAGE(); \
4725 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4726 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4727 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4728 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4729 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4730 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4731 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4732 IEM_MC_END(); \
4733 } \
4734 } \
4735 else \
4736 { \
4737 /* \
4738 * Register, memory. \
4739 */ \
4740 if (pVCpu->iem.s.uVexLength) \
4741 { \
4742 IEM_MC_BEGIN(4, 4, IEM_MC_F_NOT_286_OR_OLDER, 0); \
4743 IEM_MC_LOCAL(RTUINT256U, uDst); \
4744 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4745 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4747 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4748 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4749 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4751 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4752 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4753 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4754 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4755 IEM_MC_PREPARE_AVX_USAGE(); \
4756 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4757 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4758 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4759 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4760 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4761 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4762 IEM_MC_END(); \
4763 } \
4764 else \
4765 { \
4766 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0); \
4767 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4769 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4770 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4771 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4773 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4774 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4775 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4776 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4777 IEM_MC_PREPARE_AVX_USAGE(); \
4778 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4779 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4780 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4781 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4782 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4783 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4784 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4785 IEM_MC_END(); \
4786 } \
4787 } \
4788 (void)0
4789
4790/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4791FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4792{
4793 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4794 VSHUFP_X(vshufps);
4795}
4796
4797
4798/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4799FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4800{
4801 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4802 VSHUFP_X(vshufpd);
4803}
4804#undef VSHUFP_X
4805
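/*
 * Note: per 128-bit lane the vshufps immediate picks two elements from each
 * source; sketch (illustrative only, assumes no operand aliasing):
 *
 * @code
 *      puDst->au32[0] = puSrc1->au32[ bImm       & 3];
 *      puDst->au32[1] = puSrc1->au32[(bImm >> 2) & 3];
 *      puDst->au32[2] = puSrc2->au32[(bImm >> 4) & 3];
 *      puDst->au32[3] = puSrc2->au32[(bImm >> 6) & 3];
 * @endcode
 *
 * vshufpd uses one selector bit per qword: element 0 from puSrc1->au64[bImm & 1]
 * and element 1 from puSrc2->au64[(bImm >> 1) & 1].
 */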
4806
4807/* Opcode VEX.F3.0F 0xc6 - invalid */
4808/* Opcode VEX.F2.0F 0xc6 - invalid */
4809
4810/* Opcode VEX.0F 0xc7 - invalid */
4811/* Opcode VEX.66.0F 0xc7 - invalid */
4812/* Opcode VEX.F3.0F 0xc7 - invalid */
4813/* Opcode VEX.F2.0F 0xc7 - invalid */
4814
4815/* Opcode VEX.0F 0xc8 - invalid */
4816/* Opcode VEX.0F 0xc9 - invalid */
4817/* Opcode VEX.0F 0xca - invalid */
4818/* Opcode VEX.0F 0xcb - invalid */
4819/* Opcode VEX.0F 0xcc - invalid */
4820/* Opcode VEX.0F 0xcd - invalid */
4821/* Opcode VEX.0F 0xce - invalid */
4822/* Opcode VEX.0F 0xcf - invalid */
4823
4824
4825/* Opcode VEX.0F 0xd0 - invalid */
4826/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4827FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4828/* Opcode VEX.F3.0F 0xd0 - invalid */
4829/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4830FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4831
4832/* Opcode VEX.0F 0xd1 - invalid */
4833/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
4834FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
4835{
4836 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4837 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
4838 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4839}
4840
4841/* Opcode VEX.F3.0F 0xd1 - invalid */
4842/* Opcode VEX.F2.0F 0xd1 - invalid */
4843
4844/* Opcode VEX.0F 0xd2 - invalid */
4845/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4846FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
4847{
4848 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4849 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
4850 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4851}
4852
4853/* Opcode VEX.F3.0F 0xd2 - invalid */
4854/* Opcode VEX.F2.0F 0xd2 - invalid */
4855
4856/* Opcode VEX.0F 0xd3 - invalid */
4857/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4858FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
4859{
4860 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4861 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
4862 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4863}
4864
4865/* Opcode VEX.F3.0F 0xd3 - invalid */
4866/* Opcode VEX.F2.0F 0xd3 - invalid */
4867
4868/* Opcode VEX.0F 0xd4 - invalid */
4869
4870
4871/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4872FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4873{
4874 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4875 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4876 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4877}
4878
4879
4880/* Opcode VEX.F3.0F 0xd4 - invalid */
4881/* Opcode VEX.F2.0F 0xd4 - invalid */
4882
4883/* Opcode VEX.0F 0xd5 - invalid */
4884
4885
4886/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4887FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4888{
4889 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4890 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4891 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4892}
4893
4894
4895/* Opcode VEX.F3.0F 0xd5 - invalid */
4896/* Opcode VEX.F2.0F 0xd5 - invalid */
4897
4898/* Opcode VEX.0F 0xd6 - invalid */
4899
4900/**
4901 * @opcode 0xd6
4902 * @oppfx 0x66
4903 * @opcpuid avx
4904 * @opgroup og_avx_pcksclr_datamove
4905 * @opxcpttype none
4906 * @optest op1=-1 op2=2 -> op1=2
4907 * @optest op1=0 op2=-42 -> op1=-42
4908 */
4909FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4910{
4911 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4912 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4913 if (IEM_IS_MODRM_REG_MODE(bRm))
4914 {
4915 /*
4916 * Register, register.
4917 */
4918 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4919 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4920
4921 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4922 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4923
4924 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4925 IEM_GET_MODRM_REG(pVCpu, bRm));
4926 IEM_MC_ADVANCE_RIP_AND_FINISH();
4927 IEM_MC_END();
4928 }
4929 else
4930 {
4931 /*
4932 * Memory, register.
4933 */
4934 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
4935 IEM_MC_LOCAL(uint64_t, uSrc);
4936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4937
4938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4939 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4940 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4941 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4942
4943 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4944 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4945
4946 IEM_MC_ADVANCE_RIP_AND_FINISH();
4947 IEM_MC_END();
4948 }
4949}
4950
4951/* Opcode VEX.F3.0F 0xd6 - invalid */
4952/* Opcode VEX.F2.0F 0xd6 - invalid */
4953
4954
4955/* Opcode VEX.0F 0xd7 - invalid */
4956
4957/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4958FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4959{
4960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4961 /* Docs say register only. */
4962 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4963 {
4964 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4965 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
4966 if (pVCpu->iem.s.uVexLength)
4967 {
4968 IEM_MC_BEGIN(2, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
4969 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4970 IEM_MC_ARG(uint64_t *, puDst, 0);
4971 IEM_MC_LOCAL(RTUINT256U, uSrc);
4972 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4973 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4974 IEM_MC_PREPARE_AVX_USAGE();
4975 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4976 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4977 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4978 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4979 IEM_MC_ADVANCE_RIP_AND_FINISH();
4980 IEM_MC_END();
4981 }
4982 else
4983 {
4984 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
4985 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4986 IEM_MC_ARG(uint64_t *, puDst, 0);
4987 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4988 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4989 IEM_MC_PREPARE_AVX_USAGE();
4990 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4991 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4992 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4993 IEM_MC_ADVANCE_RIP_AND_FINISH();
4994 IEM_MC_END();
4995 }
4996 }
4997 else
4998 IEMOP_RAISE_INVALID_OPCODE_RET();
4999}
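
/*
 * What the vpmovmskb helpers compute -- hedged C sketch of the fallback (the
 * real implementation lives in the AImpl files): one mask bit per source byte,
 * taken from that byte's most significant bit, with the rest of the destination
 * GREG cleared:
 *
 *     uint64_t uMask = 0;
 *     for (unsigned iByte = 0; iByte < cbSrc; iByte++)  // cbSrc: 16 (XMM) / 32 (YMM)
 *         uMask |= (uint64_t)(pbSrc[iByte] >> 7) << iByte;
 *     *puDst = uMask;
 */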
5000
5001
5002/* Opcode VEX.F3.0F 0xd7 - invalid */
5003/* Opcode VEX.F2.0F 0xd7 - invalid */
5004
5005
5006/* Opcode VEX.0F 0xd8 - invalid */
5007
5008/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5009FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5010{
5011 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5012 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5013 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5014}
5015
5016
5017/* Opcode VEX.F3.0F 0xd8 - invalid */
5018/* Opcode VEX.F2.0F 0xd8 - invalid */
5019
5020/* Opcode VEX.0F 0xd9 - invalid */
5021
5022
5023/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5024FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5025{
5026 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5027 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5028 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5029}
5030
5031
5032/* Opcode VEX.F3.0F 0xd9 - invalid */
5033/* Opcode VEX.F2.0F 0xd9 - invalid */
5034
5035/* Opcode VEX.0F 0xda - invalid */
5036
5037
5038/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
5039FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
5040{
5041 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5042 IEMOPMEDIAF3_INIT_VARS(vpminub);
5043 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5044}
5045
5046
5047/* Opcode VEX.F3.0F 0xda - invalid */
5048/* Opcode VEX.F2.0F 0xda - invalid */
5049
5050/* Opcode VEX.0F 0xdb - invalid */
5051
5052
5053/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
5054FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
5055{
5056 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5057 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5058 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
5059}
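
/*
 * Note: vpand (and vpandn, vpor, vpxor further down) passes preassembled global
 * implementation tables rather than the function-local s_Host/s_Fallback pair
 * that INIT_VARS sets up; the dispatch mechanics are identical, the tables are
 * simply shared.
 */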
5060
5061
5062/* Opcode VEX.F3.0F 0xdb - invalid */
5063/* Opcode VEX.F2.0F 0xdb - invalid */
5064
5065/* Opcode VEX.0F 0xdc - invalid */
5066
5067
5068/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
5069FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
5070{
5071 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5072 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
5073 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5074}
5075
5076
5077/* Opcode VEX.F3.0F 0xdc - invalid */
5078/* Opcode VEX.F2.0F 0xdc - invalid */
5079
5080/* Opcode VEX.0F 0xdd - invalid */
5081
5082
5083/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
5084FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
5085{
5086 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5087 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
5088 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5089}
5090
5091
5092/* Opcode VEX.F3.0F 0xdd - invalid */
5093/* Opcode VEX.F2.0F 0xdd - invalid */
5094
5095/* Opcode VEX.0F 0xde - invalid */
5096
5097
5098/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
5099FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
5100{
5101 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5102 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
5103 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5104}
5105
5106
5107/* Opcode VEX.F3.0F 0xde - invalid */
5108/* Opcode VEX.F2.0F 0xde - invalid */
5109
5110/* Opcode VEX.0F 0xdf - invalid */
5111
5112
5113/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5114FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5115{
5116 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5117 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5118 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5119}
5120
5121
5122/* Opcode VEX.F3.0F 0xdf - invalid */
5123/* Opcode VEX.F2.0F 0xdf - invalid */
5124
5125/* Opcode VEX.0F 0xe0 - invalid */
5126
5127
5128/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
5129FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
5130{
5131 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5132 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
5133 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5134}
5135
5136
5137/* Opcode VEX.F3.0F 0xe0 - invalid */
5138/* Opcode VEX.F2.0F 0xe0 - invalid */
5139
5140/* Opcode VEX.0F 0xe1 - invalid */
5141/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
5142FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
5143{
5144 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5145 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
5146 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5147}
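
/*
 * Shift-count recap (per the SDM): the count is taken from the low 64 bits of
 * the second source operand, and for VPSRAW any count above 15 fills every word
 * lane with its sign bit, i.e. it acts like a shift by 15.
 */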
5148
5149/* Opcode VEX.F3.0F 0xe1 - invalid */
5150/* Opcode VEX.F2.0F 0xe1 - invalid */
5151
5152/* Opcode VEX.0F 0xe2 - invalid */
5153/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
5154FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
5155{
5156 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5157 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
5158 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5159}
5160
5161/* Opcode VEX.F3.0F 0xe2 - invalid */
5162/* Opcode VEX.F2.0F 0xe2 - invalid */
5163
5164/* Opcode VEX.0F 0xe3 - invalid */
5165
5166
5167/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
5168FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
5169{
5170 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5171 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
5172 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5173}
5174
5175
5176/* Opcode VEX.F3.0F 0xe3 - invalid */
5177/* Opcode VEX.F2.0F 0xe3 - invalid */
5178
5179/* Opcode VEX.0F 0xe4 - invalid */
5180
5181
5182/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
5183FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
5184{
5185 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5186 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
5187 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5188}
5189
5190
5191/* Opcode VEX.F3.0F 0xe4 - invalid */
5192/* Opcode VEX.F2.0F 0xe4 - invalid */
5193
5194/* Opcode VEX.0F 0xe5 - invalid */
5195
5196
5197/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
5198FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
5199{
5200 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5201 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
5202 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5203}
5204
5205
5206/* Opcode VEX.F3.0F 0xe5 - invalid */
5207/* Opcode VEX.F2.0F 0xe5 - invalid */
5208
5209/* Opcode VEX.0F 0xe6 - invalid */
5210/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5211FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5212/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5213FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5214/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5215FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5216
5217
5218/* Opcode VEX.0F 0xe7 - invalid */
5219
5220/**
5221 * @opcode 0xe7
5222 * @opcodesub !11 mr/reg
5223 * @oppfx 0x66
5224 * @opcpuid avx
5225 * @opgroup og_avx_cachect
5226 * @opxcpttype 1
5227 * @optest op1=-1 op2=2 -> op1=2
5228 * @optest op1=0 op2=-42 -> op1=-42
5229 */
5230FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5231{
5232 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5233 Assert(pVCpu->iem.s.uVexLength <= 1);
5234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5235 if (IEM_IS_MODRM_MEM_MODE(bRm))
5236 {
5237 if (pVCpu->iem.s.uVexLength == 0)
5238 {
5239 /*
5240 * 128-bit: Memory, register.
5241 */
5242 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
5243 IEM_MC_LOCAL(RTUINT128U, uSrc);
5244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5245
5246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5247 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5248 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5249 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5250
5251 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5252 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5253
5254 IEM_MC_ADVANCE_RIP_AND_FINISH();
5255 IEM_MC_END();
5256 }
5257 else
5258 {
5259 /*
5260 * 256-bit: Memory, register.
5261 */
5262 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
5263 IEM_MC_LOCAL(RTUINT256U, uSrc);
5264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5265
5266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5267 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5268 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5269 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5270
5271 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5272 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5273
5274 IEM_MC_ADVANCE_RIP_AND_FINISH();
5275 IEM_MC_END();
5276 }
5277 }
5278 /**
5279 * @opdone
5280 * @opmnemonic udvex660fe7reg
5281 * @opcode 0xe7
5282 * @opcodesub 11 mr/reg
5283 * @oppfx 0x66
5284 * @opunused immediate
5285 * @opcpuid avx
5286 * @optest ->
5287 */
5288 else
5289 IEMOP_RAISE_INVALID_OPCODE_RET();
5290}
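
/*
 * Design note: VMOVNTDQ is only a non-temporal caching *hint*, so emulating it
 * as an ordinary store is architecturally fine; what must be kept is the
 * alignment requirement, which the _ALIGN_SSE/_ALIGN_AVX store MCs above
 * enforce (a misaligned operand raises \#GP(0)):
 *
 *     vmovntdq [rax], ymm0      ; rax must be 32-byte aligned in the 256-bit form
 */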
5291
5292/* Opcode VEX.F3.0F 0xe7 - invalid */
5293/* Opcode VEX.F2.0F 0xe7 - invalid */
5294
5295
5296/* Opcode VEX.0F 0xe8 - invalid */
5297
5298
5299/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5300FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
5301{
5302 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5303 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
5304 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5305}
5306
5307
5308/* Opcode VEX.F3.0F 0xe8 - invalid */
5309/* Opcode VEX.F2.0F 0xe8 - invalid */
5310
5311/* Opcode VEX.0F 0xe9 - invalid */
5312
5313
5314/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5315FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
5316{
5317 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5318 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
5319 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5320}
5321
5322
5323/* Opcode VEX.F3.0F 0xe9 - invalid */
5324/* Opcode VEX.F2.0F 0xe9 - invalid */
5325
5326/* Opcode VEX.0F 0xea - invalid */
5327
5328
5329/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5330FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5331{
5332 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5333 IEMOPMEDIAF3_INIT_VARS(vpminsw);
5334 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5335}
5336
5337
5338/* Opcode VEX.F3.0F 0xea - invalid */
5339/* Opcode VEX.F2.0F 0xea - invalid */
5340
5341/* Opcode VEX.0F 0xeb - invalid */
5342
5343
5344/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5345FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5346{
5347 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5348 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5349 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5350}
5351
5352
5353
5354/* Opcode VEX.F3.0F 0xeb - invalid */
5355/* Opcode VEX.F2.0F 0xeb - invalid */
5356
5357/* Opcode VEX.0F 0xec - invalid */
5358
5359
5360/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5361FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
5362{
5363 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5364 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
5365 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5366}
5367
5368
5369/* Opcode VEX.F3.0F 0xec - invalid */
5370/* Opcode VEX.F2.0F 0xec - invalid */
5371
5372/* Opcode VEX.0F 0xed - invalid */
5373
5374
5375/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5376FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
5377{
5378 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5379 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
5380 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5381}
5382
5383
5384/* Opcode VEX.F3.0F 0xed - invalid */
5385/* Opcode VEX.F2.0F 0xed - invalid */
5386
5387/* Opcode VEX.0F 0xee - invalid */
5388
5389
5390/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5391FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5392{
5393 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5394 IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
5395 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5396}
5397
5398
5399/* Opcode VEX.F3.0F 0xee - invalid */
5400/* Opcode VEX.F2.0F 0xee - invalid */
5401
5402
5403/* Opcode VEX.0F 0xef - invalid */
5404
5405
5406/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5407FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5408{
5409 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5410 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5411 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5412}
5413
5414
5415/* Opcode VEX.F3.0F 0xef - invalid */
5416/* Opcode VEX.F2.0F 0xef - invalid */
5417
5418/* Opcode VEX.0F 0xf0 - invalid */
5419/* Opcode VEX.66.0F 0xf0 - invalid */
5420
5421
5422/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5423FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5424{
5425 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5426 Assert(pVCpu->iem.s.uVexLength <= 1);
5427 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5428 if (IEM_IS_MODRM_REG_MODE(bRm))
5429 {
5430 /*
5431 * Register, register - (not implemented, assuming it raises \#UD).
5432 */
5433 IEMOP_RAISE_INVALID_OPCODE_RET();
5434 }
5435 else if (pVCpu->iem.s.uVexLength == 0)
5436 {
5437 /*
5438 * Register, memory128.
5439 */
5440 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
5441 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5443
5444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5445 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5446 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5447 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5448
5449 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5450 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5451
5452 IEM_MC_ADVANCE_RIP_AND_FINISH();
5453 IEM_MC_END();
5454 }
5455 else
5456 {
5457 /*
5458 * Register, memory256.
5459 */
5460 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
5461 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5463
5464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5465 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5466 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5467 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5468
5469 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5470 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5471
5472 IEM_MC_ADVANCE_RIP_AND_FINISH();
5473 IEM_MC_END();
5474 }
5475}
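
/*
 * Unlike vmovntdq above, VLDDQU never faults on misalignment -- hence the _NO_AC
 * (no alignment check) fetch MCs.  On real silicon the instruction may also dodge
 * cache-line-split penalties, but as emulated here it behaves exactly like
 * vmovdqu:
 *
 *     vlddqu ymm0, [rsi+1]      ; legal unaligned 32-byte load
 */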
5476
5477
5478/* Opcode VEX.0F 0xf1 - invalid */
5479/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
5480FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
5481{
5482 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5483 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
5484 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5485}
5486
5487/* Opcode VEX.F2.0F 0xf1 - invalid */
5488
5489/* Opcode VEX.0F 0xf2 - invalid */
5490/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5491FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
5492{
5493 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5494 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
5495 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5496}
5497/* Opcode VEX.F2.0F 0xf2 - invalid */
5498
5499/* Opcode VEX.0F 0xf3 - invalid */
5500/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5501FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
5502{
5503 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5504 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
5505 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5506}
5507/* Opcode VEX.F2.0F 0xf3 - invalid */
5508
5509/* Opcode VEX.0F 0xf4 - invalid */
5510
5511
5512/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
5513FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5514{
5515 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5516 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5517 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5518}
5519
5520
5521/* Opcode VEX.F2.0F 0xf4 - invalid */
5522
5523/* Opcode VEX.0F 0xf5 - invalid */
5524
5525
5526/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5527FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
5528{
5529 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5530 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
5531 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5532}
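
/*
 * Operation recap (sketch; see the SDM for the authoritative wording): signed
 * 16x16->32 multiplies followed by a horizontal add of each pair, yielding one
 * dword per word pair:
 *
 *     dst.i32[n] = (int32_t)s1.i16[2*n]     * s2.i16[2*n]
 *                + (int32_t)s1.i16[2*n + 1] * s2.i16[2*n + 1];
 */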
5533
5534
5535/* Opcode VEX.F2.0F 0xf5 - invalid */
5536
5537/* Opcode VEX.0F 0xf6 - invalid */
5538
5539
5540/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5541FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5542{
5543 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5544 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5545 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5546}
5547
5548
5549/* Opcode VEX.F2.0F 0xf6 - invalid */
5550
5551/* Opcode VEX.0F 0xf7 - invalid */
5552/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5553FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
5554/* Opcode VEX.F2.0F 0xf7 - invalid */
5555
5556/* Opcode VEX.0F 0xf8 - invalid */
5557
5558
5559/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5560FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5561{
5562 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5563 IEMOPMEDIAF3_INIT_VARS( vpsubb);
5564 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5565}
5566
5567
5568/* Opcode VEX.F2.0F 0xf8 - invalid */
5569
5570/* Opcode VEX.0F 0xf9 - invalid */
5571
5572
5573/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
5574FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
5575{
5576 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5577 IEMOPMEDIAF3_INIT_VARS( vpsubw);
5578 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5579}
5580
5581
5582/* Opcode VEX.F2.0F 0xf9 - invalid */
5583
5584/* Opcode VEX.0F 0xfa - invalid */
5585
5586
5587/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
5588FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
5589{
5590 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5591 IEMOPMEDIAF3_INIT_VARS( vpsubd);
5592 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5593}
5594
5595
5596/* Opcode VEX.F2.0F 0xfa - invalid */
5597
5598/* Opcode VEX.0F 0xfb - invalid */
5599
5600
5601/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
5602FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
5603{
5604 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5605 IEMOPMEDIAF3_INIT_VARS( vpsubq);
5606 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5607}
5608
5609
5610/* Opcode VEX.F2.0F 0xfb - invalid */
5611
5612/* Opcode VEX.0F 0xfc - invalid */
5613
5614
5615/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
5616FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
5617{
5618 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5619 IEMOPMEDIAF3_INIT_VARS( vpaddb);
5620 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5621}
5622
5623
5624/* Opcode VEX.F2.0F 0xfc - invalid */
5625
5626/* Opcode VEX.0F 0xfd - invalid */
5627
5628
5629/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
5630FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
5631{
5632 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5633 IEMOPMEDIAF3_INIT_VARS( vpaddw);
5634 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5635}
5636
5637
5638/* Opcode VEX.F2.0F 0xfd - invalid */
5639
5640/* Opcode VEX.0F 0xfe - invalid */
5641
5642
5643/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
5644FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
5645{
5646 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5647 IEMOPMEDIAF3_INIT_VARS( vpaddd);
5648 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5649}
5650
5651
5652/* Opcode VEX.F2.0F 0xfe - invalid */
5653
5654
5655/** Opcode **** 0x0f 0xff - UD0 */
5656FNIEMOP_DEF(iemOp_vud0)
5657{
5658/** @todo testcase: vud0 */
5659 IEMOP_MNEMONIC(vud0, "vud0");
5660 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
5661 {
5662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
5663 if (IEM_IS_MODRM_MEM_MODE(bRm))
5664 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
5665 }
5666 IEMOP_HLP_DONE_DECODING();
5667 IEMOP_RAISE_INVALID_OPCODE_RET();
5668}
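
/*
 * Decode-length note: Intel CPUs consume a ModR/M byte (plus any addressing
 * bytes) for UD0 while AMD ones do not, which is what the vendor check above
 * models; both paths still raise \#UD, only the reported instruction length
 * differs.
 */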
5669
5670
5671
5672/**
5673 * VEX opcode map \#1.
5674 *
5675 * @sa g_apfnTwoByteMap
5676 */
5677const PFNIEMOP g_apfnVexMap1[] =
5678{
5679 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
5680 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
5681 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
5682 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
5683 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
5684 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
5685 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
5686 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
5687 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
5688 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
5689 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
5690 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
5691 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
5692 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
5693 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
5694 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
5695 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
5696
5697 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
5698 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
5699 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
5700 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5701 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5702 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5703 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
5704 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5705 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
5706 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
5707 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
5708 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
5709 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
5710 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
5711 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
5712 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
5713
5714 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
5715 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
5716 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
5717 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
5718 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
5719 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
5720 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
5721 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
5722 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5723 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5724 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
5725 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5726 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
5727 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
5728 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5729 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5730
5731 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
5732 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
5733 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
5734 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
5735 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
5736 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
5737 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
5738 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
5739 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5740 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5741 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5742 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5743 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5744 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5745 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5746 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5747
5748 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
5749 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
5750 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
5751 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
5752 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
5753 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
5754 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
5755 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
5756 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
5757 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
5758 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
5759 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
5760 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
5761 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
5762 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
5763 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
5764
5765 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5766 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
5767 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5768 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5769 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5770 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5771 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5772 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5773 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
5774 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
5775 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
5776 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
5777 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
5778 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
5779 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
5780 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
5781
5782 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5783 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5784 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5785 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5786 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5787 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5788 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5789 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5790 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5791 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5792 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5793 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5794 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5795 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5796 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5797 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
5798
5799 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
5800 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5801 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5802 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5803 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5804 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5805 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5806 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5807 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
5808 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
5809 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
5810 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
5811 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
5812 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
5813 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
5814 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
5815
5816 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
5817 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
5818 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
5819 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
5820 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
5821 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
5822 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
5823 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
5824 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
5825 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
5826 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
5827 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
5828 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
5829 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
5830 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
5831 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
5832
5833 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
5834 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
5835 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
5836 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
5837 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
5838 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
5839 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
5840 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
5841 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
5842 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
5843 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
5844 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
5845 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
5846 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
5847 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
5848 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
5849
5850 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5851 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5852 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5853 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5854 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5855 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5856 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5857 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5858 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5859 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5860 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
5861 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
5862 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
5863 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
5864 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
5865 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
5866
5867 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5868 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5869 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5870 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5871 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5872 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5873 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5874 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5875 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5876 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5877 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
5878 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
5879 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
5880 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
5881 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
5882 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
5883
5884 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5885 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5886 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
5887 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5888 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5889 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5890 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
5891 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5892 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5893 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5894 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
5895 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
5896 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
5897 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
5898 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
5899 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
5900
5901 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
5902 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5903 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5904 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5905 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5906 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5907 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5908 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5909 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5910 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5911 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5912 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5913 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5914 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5915 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5916 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5917
5918 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5919 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5920 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5921 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5922 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5923 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5924 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
5925 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5926 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5927 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5928 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5929 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5930 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5931 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5932 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5933 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5934
5935 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
5936 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5937 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5938 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5939 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5940 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5941 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5942 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5943 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5944 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5945 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5946 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5947 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5948 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5949 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5950 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
5951};
5952AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
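
/*
 * Layout recap: 256 opcodes x 4 mandatory-prefix columns (none, 0x66, 0xF3, 0xF2)
 * = 1024 entries, which the AssertCompile above pins down.  A decoder would index
 * the table along these lines (sketch only; the exact prefix-index variable is an
 * assumption):
 *
 *     pfn = g_apfnVexMap1[((uintptr_t)bOpcode << 2) + idxMandatoryPrefix];
 */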
5953/** @} */
5954