VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h@104074

Last change on this file since 104074 was 104058, checked in by vboxsync, 11 months ago:

VMM/IEM: Make IEM_MC_FETCH_YREG_U128() take an iDQWord parameter to select whether to fetch the low or high half of the 256-bit register, bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 220.0 KB
/* $Id: IEMAllInstVexMap1.cpp.h 104058 2024-03-26 13:41:59Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1, xmm2/mem128
 *     - vpxxx ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));

            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));

            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);

            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);

            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));

            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);

            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);

            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));

            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
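
/*
 * Editor's sketch (not part of the original file): the register/register,
 * VEX.L=1 path above boils down to dst = op(src1, src2) on 256-bit values,
 * with the destination written back at full vector length.  A minimal plain-C
 * model, assuming a bitwise-AND style operation; the Vex256Example type and
 * the function name are invented for illustration only.
 */
#if 0 /* illustrative only, never built */
typedef struct Vex256Example { uint64_t au64[4]; } Vex256Example;

static void vexExampleThreeOperandOp(Vex256Example *pDst, Vex256Example const *pSrc1,
                                     Vex256Example const *pSrc2, unsigned cbVecLen)
{
    for (unsigned i = 0; i < 4; i++)
        pDst->au64[i] = pSrc1->au64[i] & pSrc2->au64[i]; /* stands in for the pfnU256 worker */
    if (cbVecLen == 16)   /* VEX.L=0: only the XMM half is produced, bits 255:128 zeroed (VLMAX) */
        pDst->au64[2] = pDst->au64[3] = 0;
}
#endif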


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1, xmm2/mem128
 *     - vpxxx ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1/mem128
 *     - vpxxx ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
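
/*
 * Editor's sketch (an assumption based on the standard x86 ModRM encoding,
 * not on the actual IEM macro definitions): the register/memory split every
 * worker above performs on bRm comes down to the two ModRM mod bits.
 */
#if 0 /* illustrative only, never built */
static int vexExampleIsModRmRegMode(uint8_t bRm)
{
    return (bRm >> 6) == 3; /* mod == 11b: the r/m field names a register */
}
#endif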
393
394
395/* Opcode VEX.0F 0x00 - invalid */
396/* Opcode VEX.0F 0x01 - invalid */
397/* Opcode VEX.0F 0x02 - invalid */
398/* Opcode VEX.0F 0x03 - invalid */
399/* Opcode VEX.0F 0x04 - invalid */
400/* Opcode VEX.0F 0x05 - invalid */
401/* Opcode VEX.0F 0x06 - invalid */
402/* Opcode VEX.0F 0x07 - invalid */
403/* Opcode VEX.0F 0x08 - invalid */
404/* Opcode VEX.0F 0x09 - invalid */
405/* Opcode VEX.0F 0x0a - invalid */
406
407/** Opcode VEX.0F 0x0b. */
408FNIEMOP_DEF(iemOp_vud2)
409{
410 IEMOP_MNEMONIC(vud2, "vud2");
411 IEMOP_RAISE_INVALID_OPCODE_RET();
412}
413
414/* Opcode VEX.0F 0x0c - invalid */
415/* Opcode VEX.0F 0x0d - invalid */
416/* Opcode VEX.0F 0x0e - invalid */
417/* Opcode VEX.0F 0x0f - invalid */
418
419
420/**
421 * @opcode 0x10
422 * @oppfx none
423 * @opcpuid avx
424 * @opgroup og_avx_simdfp_datamove
425 * @opxcpttype 4UA
426 * @optest op1=1 op2=2 -> op1=2
427 * @optest op1=0 op2=-22 -> op1=-22
428 */
429FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
430{
431 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
432 Assert(pVCpu->iem.s.uVexLength <= 1);
433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
434 if (IEM_IS_MODRM_REG_MODE(bRm))
435 {
436 /*
437 * Register, register.
438 */
439 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
440 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
441 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
442 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
443 if (pVCpu->iem.s.uVexLength == 0)
444 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
445 IEM_GET_MODRM_RM(pVCpu, bRm));
446 else
447 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
448 IEM_GET_MODRM_RM(pVCpu, bRm));
449 IEM_MC_ADVANCE_RIP_AND_FINISH();
450 IEM_MC_END();
451 }
452 else if (pVCpu->iem.s.uVexLength == 0)
453 {
454 /*
455 * 128-bit: Register, Memory
456 */
457 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
458 IEM_MC_LOCAL(RTUINT128U, uSrc);
459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
460
461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
462 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
463 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
464 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
465
466 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
467 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
468
469 IEM_MC_ADVANCE_RIP_AND_FINISH();
470 IEM_MC_END();
471 }
472 else
473 {
474 /*
475 * 256-bit: Register, Memory
476 */
477 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
478 IEM_MC_LOCAL(RTUINT256U, uSrc);
479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
480
481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
482 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
483 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
484 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
485
486 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
487 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
488
489 IEM_MC_ADVANCE_RIP_AND_FINISH();
490 IEM_MC_END();
491 }
492}
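
/*
 * Editor's sketch (not part of the original file): what the VEX.L=0 memory
 * form of vmovups does, in plain C.  No alignment check is performed here
 * (contrast the _ALIGN_ fetchers used by vmovaps further down), and the
 * destination register is zero-extended to the maximum vector length.  The
 * names below are invented for illustration.
 */
#if 0 /* illustrative only, never built */
static void vexExampleMovups128Load(uint64_t aDstYmm[4], uint64_t const *pau64Mem)
{
    aDstYmm[0] = pau64Mem[0];   /* an unaligned 16-byte load is permitted */
    aDstYmm[1] = pau64Mem[1];
    aDstYmm[2] = 0;             /* VEX encodings zero bits 255:128 of the register */
    aDstYmm[3] = 0;
}
#endif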


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
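
/*
 * Editor's sketch (not part of the original file): the register form of
 * vmovss merges three pieces, matching the @optest cases above.  Plain C
 * with invented names; vmovsd below is the analogous 64-bit merge.
 */
#if 0 /* illustrative only, never built */
static void vexExampleMovssRegMerge(uint32_t aDstYmm[8], uint32_t const aHss[4], uint32_t uUss)
{
    aDstYmm[0] = uUss;          /* bits 31:0 from the r/m register */
    aDstYmm[1] = aHss[1];       /* bits 127:32 from the VVVV register */
    aDstYmm[2] = aHss[2];
    aDstYmm[3] = aHss[3];
    for (unsigned i = 4; i < 8; i++)
        aDstYmm[i] = 0;         /* zero-extended to VLMAX */
}
#endif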


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Wss_Hss_Vss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         * @optest op2=0x2200220122022203
         *         op3=0x3304330533063307
         *         -> op1=0x22002201220222033304330533063307
         * @optest op2=-1 op3=-42 -> op1=-42
         * @note op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest op1=1 op2=0 op3=0 -> op1=0
         * @optest op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
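
/*
 * Editor's sketch (not part of the original file): the qword shuffling the
 * register form (vmovhlps) above performs, in plain C with invented names.
 */
#if 0 /* illustrative only, never built */
static void vexExampleMovhlps(uint64_t aDstYmm[4], uint64_t const aHq[2], uint64_t const aUq[2])
{
    aDstYmm[0] = aUq[1];         /* low qword <- high qword of the r/m register */
    aDstYmm[1] = aHq[1];         /* high qword <- high qword of the VVVV register */
    aDstYmm[2] = aDstYmm[3] = 0; /* zero-extended to VLMAX */
}
#endif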


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 * @optest op2=0 op3=2 -> op1=2
 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *         -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *         -> op1=0x00000002000000020000000100000001
 * @optest vex.l==1 /
 *         op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *         -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
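
/*
 * Editor's sketch (not part of the original file): the even-lane duplication
 * that the four IEM_MC_STORE_XREG_U32_U128 invocations above implement,
 * written for one 128-bit lane in plain C; names invented for illustration.
 */
#if 0 /* illustrative only, never built */
static void vexExampleMovsldup128(uint32_t aDst[4], uint32_t const aSrc[4])
{
    aDst[0] = aSrc[0];          /* lanes 0 and 1 both take source dword 0 */
    aDst[1] = aSrc[0];
    aDst[2] = aSrc[2];          /* lanes 2 and 3 both take source dword 2 */
    aDst[3] = aSrc[2];
}
#endif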


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *         -> op1=0x22222222111111112222222211111111
 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *         -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
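
/*
 * Editor's sketch (not part of the original file): vmovddup's low-qword
 * broadcast per 128-bit lane, matching the two IEM_MC_STORE_XREG_U64 calls
 * above; plain C, names invented for illustration.
 */
#if 0 /* illustrative only, never built */
static void vexExampleMovddup128(uint64_t aDst[2], uint64_t const aSrc[2])
{
    aDst[0] = aSrc[0];          /* both qwords take the source's low qword */
    aDst[1] = aSrc[0];
}
#endif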


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */


/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x15 - invalid */
/* Opcode VEX.F2.0F 0x15 - invalid */
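
/*
 * Editor's sketch (not part of the original file): the dword interleave
 * behind vunpcklps and vunpckhps for one 128-bit lane, in plain C with
 * invented names; the pd variants interleave qwords the same way.
 */
#if 0 /* illustrative only, never built */
static void vexExampleUnpckPs128(uint32_t aDst[4], uint32_t const aSrc1[4],
                                 uint32_t const aSrc2[4], int fHigh)
{
    unsigned const iBase = fHigh ? 2 : 0; /* low variant reads dwords 0..1, high reads 2..3 */
    aDst[0] = aSrc1[iBase];
    aDst[1] = aSrc2[iBase];
    aDst[2] = aSrc1[iBase + 1];
    aDst[3] = aSrc2[iBase + 1];
}
#endif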


FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 */
FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 */
FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
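
/*
 * Editor's sketch (not part of the original file): vmovshdup mirrors
 * vmovsldup but broadcasts the odd dwords, matching the uSrc lane indices
 * 1 and 3 used above; plain C, names invented for illustration.
 */
#if 0 /* illustrative only, never built */
static void vexExampleMovshdup128(uint32_t aDst[4], uint32_t const aSrc[4])
{
    aDst[0] = aSrc[1];          /* lanes 0 and 1 both take source dword 1 */
    aDst[1] = aSrc[1];
    aDst[2] = aSrc[3];          /* lanes 2 and 3 both take source dword 3 */
    aDst[3] = aSrc[3];
}
#endif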


/* Opcode VEX.F2.0F 0x16 - invalid */


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/* Opcode VEX.F3.0F 0x17 - invalid */
/* Opcode VEX.F2.0F 0x17 - invalid */


/* Opcode VEX.0F 0x18 - invalid */
/* Opcode VEX.0F 0x19 - invalid */
/* Opcode VEX.0F 0x1a - invalid */
/* Opcode VEX.0F 0x1b - invalid */
/* Opcode VEX.0F 0x1c - invalid */
/* Opcode VEX.0F 0x1d - invalid */
/* Opcode VEX.0F 0x1e - invalid */
/* Opcode VEX.0F 0x1f - invalid */

/* Opcode VEX.0F 0x20 - invalid */
/* Opcode VEX.0F 0x21 - invalid */
/* Opcode VEX.0F 0x22 - invalid */
/* Opcode VEX.0F 0x23 - invalid */
/* Opcode VEX.0F 0x24 - invalid */
/* Opcode VEX.0F 0x25 - invalid */
/* Opcode VEX.0F 0x26 - invalid */
/* Opcode VEX.0F 0x27 - invalid */

1767/**
1768 * @opcode 0x28
1769 * @oppfx none
1770 * @opcpuid avx
1771 * @opgroup og_avx_pcksclr_datamove
1772 * @opxcpttype 1
1773 * @optest op1=1 op2=2 -> op1=2
1774 * @optest op1=0 op2=-42 -> op1=-42
1775 * @note Almost identical to vmovapd.
1776 */
1777FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1778{
1779 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1780 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1781 Assert(pVCpu->iem.s.uVexLength <= 1);
1782 if (IEM_IS_MODRM_REG_MODE(bRm))
1783 {
1784 /*
1785 * Register, register.
1786 */
1787 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1788 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1789
1790 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1791 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1792 if (pVCpu->iem.s.uVexLength == 0)
1793 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1794 IEM_GET_MODRM_RM(pVCpu, bRm));
1795 else
1796 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1797 IEM_GET_MODRM_RM(pVCpu, bRm));
1798 IEM_MC_ADVANCE_RIP_AND_FINISH();
1799 IEM_MC_END();
1800 }
1801 else
1802 {
1803 /*
1804 * Register, memory.
1805 */
1806 if (pVCpu->iem.s.uVexLength == 0)
1807 {
1808 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1810 IEM_MC_LOCAL(RTUINT128U, uSrc);
1811
1812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1813 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1814 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1815 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1816
1817 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1818 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1819
1820 IEM_MC_ADVANCE_RIP_AND_FINISH();
1821 IEM_MC_END();
1822 }
1823 else
1824 {
1825 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1827 IEM_MC_LOCAL(RTUINT256U, uSrc);
1828
1829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1830 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1831 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1832 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1833
1834 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1835 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1836
1837 IEM_MC_ADVANCE_RIP_AND_FINISH();
1838 IEM_MC_END();
1839 }
1840 }
1841}
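/*
 * Editor's sketch, not part of the original file: the _ZX_VLMAX stores used
 * above capture the VEX rule that a 128-bit destination write zeroes the
 * register up to the maximum vector length, i.e. bits 255:128 of the YMM
 * register. Approximately (hypothetical name, assuming the RTUINT256U layout):
 */
#if 0
static void sketchStoreU128ZxVlmax(PRTUINT256U puYmmDst, PCRTUINT128U puSrc)
{
    puYmmDst->au64[0] = puSrc->au64[0];
    puYmmDst->au64[1] = puSrc->au64[1];
    puYmmDst->au64[2] = 0; /* VEX semantics: upper half zeroed */
    puYmmDst->au64[3] = 0;
}
#endif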
1842
1843
1844/**
1845 * @opcode 0x28
1846 * @oppfx 66
1847 * @opcpuid avx
1848 * @opgroup og_avx_pcksclr_datamove
1849 * @opxcpttype 1
1850 * @optest op1=1 op2=2 -> op1=2
1851 * @optest op1=0 op2=-42 -> op1=-42
1852 * @note Almost identical to vmovaps.
1853 */
1854FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1855{
1856 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1858 Assert(pVCpu->iem.s.uVexLength <= 1);
1859 if (IEM_IS_MODRM_REG_MODE(bRm))
1860 {
1861 /*
1862 * Register, register.
1863 */
1864 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1865 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1866
1867 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1868 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1869 if (pVCpu->iem.s.uVexLength == 0)
1870 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1871 IEM_GET_MODRM_RM(pVCpu, bRm));
1872 else
1873 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1874 IEM_GET_MODRM_RM(pVCpu, bRm));
1875 IEM_MC_ADVANCE_RIP_AND_FINISH();
1876 IEM_MC_END();
1877 }
1878 else
1879 {
1880 /*
1881 * Register, memory.
1882 */
1883 if (pVCpu->iem.s.uVexLength == 0)
1884 {
1885 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1886 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1887 IEM_MC_LOCAL(RTUINT128U, uSrc);
1888
1889 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1890 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1891 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1892 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1893
1894 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1895 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1896
1897 IEM_MC_ADVANCE_RIP_AND_FINISH();
1898 IEM_MC_END();
1899 }
1900 else
1901 {
1902 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1904 IEM_MC_LOCAL(RTUINT256U, uSrc);
1905
1906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1907 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1908 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1909 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1910
1911 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1912 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1913
1914 IEM_MC_ADVANCE_RIP_AND_FINISH();
1915 IEM_MC_END();
1916 }
1917 }
1918}
1919
1920/**
1921 * @opmnemonic udvexf30f28
1922 * @opcode 0x28
1923 * @oppfx 0xf3
1924 * @opunused vex.modrm
1925 * @opcpuid avx
1926 * @optest ->
1927 * @opdone
1928 */
1929
1930/**
1931 * @opmnemonic udvexf20f28
1932 * @opcode 0x28
1933 * @oppfx 0xf2
1934 * @opunused vex.modrm
1935 * @opcpuid avx
1936 * @optest ->
1937 * @opdone
1938 */
1939
1940/**
1941 * @opcode 0x29
1942 * @oppfx none
1943 * @opcpuid avx
1944 * @opgroup og_avx_pcksclr_datamove
1945 * @opxcpttype 1
1946 * @optest op1=1 op2=2 -> op1=2
1947 * @optest op1=0 op2=-42 -> op1=-42
1948 * @note Almost identical to vmovapd.
1949 */
1950FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1951{
1952 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1954 Assert(pVCpu->iem.s.uVexLength <= 1);
1955 if (IEM_IS_MODRM_REG_MODE(bRm))
1956 {
1957 /*
1958 * Register, register.
1959 */
1960 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1961 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1962
1963 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1964 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1965 if (pVCpu->iem.s.uVexLength == 0)
1966 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1967 IEM_GET_MODRM_REG(pVCpu, bRm));
1968 else
1969 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1970 IEM_GET_MODRM_REG(pVCpu, bRm));
1971 IEM_MC_ADVANCE_RIP_AND_FINISH();
1972 IEM_MC_END();
1973 }
1974 else
1975 {
1976 /*
1977 * Register, memory.
1978 */
1979 if (pVCpu->iem.s.uVexLength == 0)
1980 {
1981 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1983 IEM_MC_LOCAL(RTUINT128U, uSrc);
1984
1985 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1986 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1987 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1988 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1989
1990 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
1991 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1992
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 }
1996 else
1997 {
1998 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2000 IEM_MC_LOCAL(RTUINT256U, uSrc);
2001
2002 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2003 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2004 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2005 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2006
2007 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2008 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2009
2010 IEM_MC_ADVANCE_RIP_AND_FINISH();
2011 IEM_MC_END();
2012 }
2013 }
2014}
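/*
 * Editor's note, not part of the original file: the _ALIGN_SSE/_ALIGN_AVX
 * store variants above enforce the vmovaps alignment rule, i.e. a 16-byte
 * (VEX.128) or 32-byte (VEX.256) aligned operand; a misaligned address
 * raises #GP(0). A minimal check sketch (hypothetical name):
 */
#if 0
static bool sketchIsVmovapsOperandAligned(RTGCPTR GCPtrEff, bool f256)
{
    return !(GCPtrEff & (f256 ? 31 : 15));
}
#endif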
2015
2016/**
2017 * @opcode 0x29
2018 * @oppfx 66
2019 * @opcpuid avx
2020 * @opgroup og_avx_pcksclr_datamove
2021 * @opxcpttype 1
2022 * @optest op1=1 op2=2 -> op1=2
2023 * @optest op1=0 op2=-42 -> op1=-42
2024 * @note Almost identical to vmovaps.
2025 */
2026FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2027{
2028 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2029 Assert(pVCpu->iem.s.uVexLength <= 1);
2030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2031 if (IEM_IS_MODRM_REG_MODE(bRm))
2032 {
2033 /*
2034 * Register, register.
2035 */
2036 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2037 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2038
2039 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2040 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2041 if (pVCpu->iem.s.uVexLength == 0)
2042 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2043 IEM_GET_MODRM_REG(pVCpu, bRm));
2044 else
2045 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2046 IEM_GET_MODRM_REG(pVCpu, bRm));
2047 IEM_MC_ADVANCE_RIP_AND_FINISH();
2048 IEM_MC_END();
2049 }
2050 else
2051 {
2052 /*
2053 * Register, memory.
2054 */
2055 if (pVCpu->iem.s.uVexLength == 0)
2056 {
2057 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2059 IEM_MC_LOCAL(RTUINT128U, uSrc);
2060
2061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2062 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2063 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2064 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2065
2066 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2067 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2068
2069 IEM_MC_ADVANCE_RIP_AND_FINISH();
2070 IEM_MC_END();
2071 }
2072 else
2073 {
2074 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2076 IEM_MC_LOCAL(RTUINT256U, uSrc);
2077
2078 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2079 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2080 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2081 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2082
2083 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2084 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2085
2086 IEM_MC_ADVANCE_RIP_AND_FINISH();
2087 IEM_MC_END();
2088 }
2089 }
2090}
2091
2092
2093/**
2094 * @opmnemonic udvexf30f29
2095 * @opcode 0x29
2096 * @oppfx 0xf3
2097 * @opunused vex.modrm
2098 * @opcpuid avx
2099 * @optest ->
2100 * @opdone
2101 */
2102
2103/**
2104 * @opmnemonic udvexf20f29
2105 * @opcode 0x29
2106 * @oppfx 0xf2
2107 * @opunused vex.modrm
2108 * @opcpuid avx
2109 * @optest ->
2110 * @opdone
2111 */
2112
2113
2114/** Opcode VEX.0F 0x2a - invalid */
2115/** Opcode VEX.66.0F 0x2a - invalid */
2116/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2117FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2118/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2119FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2120
2121
2122/**
2123 * @opcode 0x2b
2124 * @opcodesub !11 mr/reg
2125 * @oppfx none
2126 * @opcpuid avx
2127 * @opgroup og_avx_cachect
2128 * @opxcpttype 1
2129 * @optest op1=1 op2=2 -> op1=2
2130 * @optest op1=0 op2=-42 -> op1=-42
2131 * @note Identical implementation to vmovntpd.
2132 */
2133FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2134{
2135 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2136 Assert(pVCpu->iem.s.uVexLength <= 1);
2137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2138 if (IEM_IS_MODRM_MEM_MODE(bRm))
2139 {
2140 /*
2141 * Memory, register.
2142 */
2143 if (pVCpu->iem.s.uVexLength == 0)
2144 {
2145 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2146 IEM_MC_LOCAL(RTUINT128U, uSrc);
2147 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2148
2149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2150 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2151 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2152 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2153
2154 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2155 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2156
2157 IEM_MC_ADVANCE_RIP_AND_FINISH();
2158 IEM_MC_END();
2159 }
2160 else
2161 {
2162 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2163 IEM_MC_LOCAL(RTUINT256U, uSrc);
2164 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2165
2166 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2167 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2168 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2169 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2170
2171 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2172 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2173
2174 IEM_MC_ADVANCE_RIP_AND_FINISH();
2175 IEM_MC_END();
2176 }
2177 }
2178 /* The register, register encoding is invalid. */
2179 else
2180 IEMOP_RAISE_INVALID_OPCODE_RET();
2181}
2182
2183/**
2184 * @opcode 0x2b
2185 * @opcodesub !11 mr/reg
2186 * @oppfx 0x66
2187 * @opcpuid avx
2188 * @opgroup og_avx_cachect
2189 * @opxcpttype 1
2190 * @optest op1=1 op2=2 -> op1=2
2191 * @optest op1=0 op2=-42 -> op1=-42
2192 * @note Identical implementation to vmovntps.
2193 */
2194FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2195{
2196 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2197 Assert(pVCpu->iem.s.uVexLength <= 1);
2198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2199 if (IEM_IS_MODRM_MEM_MODE(bRm))
2200 {
2201 /*
2202 * Memory, register.
2203 */
2204 if (pVCpu->iem.s.uVexLength == 0)
2205 {
2206 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2207 IEM_MC_LOCAL(RTUINT128U, uSrc);
2208 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2209
2210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2211 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2212 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2213 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2214
2215 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2216 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2217
2218 IEM_MC_ADVANCE_RIP_AND_FINISH();
2219 IEM_MC_END();
2220 }
2221 else
2222 {
2223 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2224 IEM_MC_LOCAL(RTUINT256U, uSrc);
2225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2226
2227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2228 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2229 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2230 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2231
2232 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2233 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2234
2235 IEM_MC_ADVANCE_RIP_AND_FINISH();
2236 IEM_MC_END();
2237 }
2238 }
2239 /* The register, register encoding is invalid. */
2240 else
2241 IEMOP_RAISE_INVALID_OPCODE_RET();
2242}
2243
2244/**
2245 * @opmnemonic udvexf30f2b
2246 * @opcode 0x2b
2247 * @oppfx 0xf3
2248 * @opunused vex.modrm
2249 * @opcpuid avx
2250 * @optest ->
2251 * @opdone
2252 */
2253
2254/**
2255 * @opmnemonic udvexf20f2b
2256 * @opcode 0x2b
2257 * @oppfx 0xf2
2258 * @opunused vex.modrm
2259 * @opcpuid avx
2260 * @optest ->
2261 * @opdone
2262 */
2263
2264
2265/* Opcode VEX.0F 0x2c - invalid */
2266/* Opcode VEX.66.0F 0x2c - invalid */
2267/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2268FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2269/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2270FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2271
2272/* Opcode VEX.0F 0x2d - invalid */
2273/* Opcode VEX.66.0F 0x2d - invalid */
2274/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2275FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2276/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2277FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2278
2279
2280/**
2281 * @opcode 0x2e
2282 * @oppfx none
2283 * @opflmodify cf,pf,af,zf,sf,of
2284 * @opflclear af,sf,of
2285 */
2286FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2287{
2288 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2290 if (IEM_IS_MODRM_REG_MODE(bRm))
2291 {
2292 /*
2293 * Register, register.
2294 */
2295 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2296 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2297 IEM_MC_LOCAL(uint32_t, fEFlags);
2298 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2299 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2300 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2301 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2302 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2303 IEM_MC_PREPARE_AVX_USAGE();
2304 IEM_MC_FETCH_EFLAGS(fEFlags);
2305 IEM_MC_REF_MXCSR(pfMxcsr);
2306 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2307 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2308 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2309 pfMxcsr, pEFlags, puSrc1, puSrc2);
2310 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2311 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2312 } IEM_MC_ELSE() {
2313 IEM_MC_COMMIT_EFLAGS(fEFlags);
2314 } IEM_MC_ENDIF();
2315
2316 IEM_MC_ADVANCE_RIP_AND_FINISH();
2317 IEM_MC_END();
2318 }
2319 else
2320 {
2321 /*
2322 * Register, memory.
2323 */
2324 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2325 IEM_MC_LOCAL(uint32_t, fEFlags);
2326 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2327 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2328 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2329 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2330 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2332
2333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2334 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2335 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2336 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2337
2338 IEM_MC_PREPARE_AVX_USAGE();
2339 IEM_MC_FETCH_EFLAGS(fEFlags);
2340 IEM_MC_REF_MXCSR(pfMxcsr);
2341 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2342 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2343 pfMxcsr, pEFlags, puSrc1, puSrc2);
2344 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2345 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2346 } IEM_MC_ELSE() {
2347 IEM_MC_COMMIT_EFLAGS(fEFlags);
2348 } IEM_MC_ENDIF();
2349
2350 IEM_MC_ADVANCE_RIP_AND_FINISH();
2351 IEM_MC_END();
2352 }
2353}
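/*
 * Editor's sketch, not part of the original file: the flag mapping produced
 * by the (v)ucomiss worker called above, with OF/AF/SF cleared as the
 * @opflclear note says. Hypothetical helper, ignoring MXCSR details:
 */
#if 0
static uint32_t sketchUComISSEFlags(float r32Src1, float r32Src2)
{
    uint32_t fEfl = 0;
    if (r32Src1 != r32Src1 || r32Src2 != r32Src2) /* NaN => unordered */
        fEfl = X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
    else if (r32Src1 < r32Src2)
        fEfl = X86_EFL_CF;
    else if (r32Src1 == r32Src2)
        fEfl = X86_EFL_ZF;
    /* r32Src1 > r32Src2: all three flags clear. */
    return fEfl;
}
#endif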
2354
2355
2356/**
2357 * @opcode 0x2e
2358 * @oppfx 0x66
2359 * @opflmodify cf,pf,af,zf,sf,of
2360 * @opflclear af,sf,of
2361 */
2362FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2363{
2364 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2366 if (IEM_IS_MODRM_REG_MODE(bRm))
2367 {
2368 /*
2369 * Register, register.
2370 */
2371 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2372 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2373 IEM_MC_LOCAL(uint32_t, fEFlags);
2374 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2375 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2376 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2377 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2378 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2379 IEM_MC_PREPARE_AVX_USAGE();
2380 IEM_MC_FETCH_EFLAGS(fEFlags);
2381 IEM_MC_REF_MXCSR(pfMxcsr);
2382 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2383 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2384 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2385 pfMxcsr, pEFlags, puSrc1, puSrc2);
2386 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2387 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2388 } IEM_MC_ELSE() {
2389 IEM_MC_COMMIT_EFLAGS(fEFlags);
2390 } IEM_MC_ENDIF();
2391
2392 IEM_MC_ADVANCE_RIP_AND_FINISH();
2393 IEM_MC_END();
2394 }
2395 else
2396 {
2397 /*
2398 * Register, memory.
2399 */
2400 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2401 IEM_MC_LOCAL(uint32_t, fEFlags);
2402 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2403 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2404 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2405 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2406 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2408
2409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2410 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2411 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2412 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2413
2414 IEM_MC_PREPARE_AVX_USAGE();
2415 IEM_MC_FETCH_EFLAGS(fEFlags);
2416 IEM_MC_REF_MXCSR(pfMxcsr);
2417 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2418 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2419 pfMxcsr, pEFlags, puSrc1, puSrc2);
2420 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2421 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2422 } IEM_MC_ELSE() {
2423 IEM_MC_COMMIT_EFLAGS(fEFlags);
2424 } IEM_MC_ENDIF();
2425
2426 IEM_MC_ADVANCE_RIP_AND_FINISH();
2427 IEM_MC_END();
2428 }
2429}
2430
2431
2432/* Opcode VEX.F3.0F 0x2e - invalid */
2433/* Opcode VEX.F2.0F 0x2e - invalid */
2434
2435/**
2436 * @opcode 0x2f
2437 * @oppfx none
2438 * @opflmodify cf,pf,af,zf,sf,of
2439 * @opflclear af,sf,of
2440 */
2441FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2442{
2443 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2444 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2445 if (IEM_IS_MODRM_REG_MODE(bRm))
2446 {
2447 /*
2448 * Register, register.
2449 */
2450 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2451 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2452 IEM_MC_LOCAL(uint32_t, fEFlags);
2453 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2454 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2455 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2456 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2457 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2458 IEM_MC_PREPARE_AVX_USAGE();
2459 IEM_MC_FETCH_EFLAGS(fEFlags);
2460 IEM_MC_REF_MXCSR(pfMxcsr);
2461 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2462 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2463 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2464 pfMxcsr, pEFlags, puSrc1, puSrc2);
2465 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2466 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2467 } IEM_MC_ELSE() {
2468 IEM_MC_COMMIT_EFLAGS(fEFlags);
2469 } IEM_MC_ENDIF();
2470
2471 IEM_MC_ADVANCE_RIP_AND_FINISH();
2472 IEM_MC_END();
2473 }
2474 else
2475 {
2476 /*
2477 * Register, memory.
2478 */
2479 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2480 IEM_MC_LOCAL(uint32_t, fEFlags);
2481 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2482 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2483 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2484 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2485 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2487
2488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2489 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2490 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2491 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2492
2493 IEM_MC_PREPARE_AVX_USAGE();
2494 IEM_MC_FETCH_EFLAGS(fEFlags);
2495 IEM_MC_REF_MXCSR(pfMxcsr);
2496 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2497 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2498 pfMxcsr, pEFlags, puSrc1, puSrc2);
2499 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2500 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2501 } IEM_MC_ELSE() {
2502 IEM_MC_COMMIT_EFLAGS(fEFlags);
2503 } IEM_MC_ENDIF();
2504
2505 IEM_MC_ADVANCE_RIP_AND_FINISH();
2506 IEM_MC_END();
2507 }
2508}
2509
2510
2511/**
2512 * @opcode 0x2f
2513 * @oppfx 0x66
2514 * @opflmodify cf,pf,af,zf,sf,of
2515 * @opflclear af,sf,of
2516 */
2517FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2518{
2519 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if (IEM_IS_MODRM_REG_MODE(bRm))
2522 {
2523 /*
2524 * Register, register.
2525 */
2526 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2527 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2528 IEM_MC_LOCAL(uint32_t, fEFlags);
2529 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2530 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2531 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2532 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2533 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2534 IEM_MC_PREPARE_AVX_USAGE();
2535 IEM_MC_FETCH_EFLAGS(fEFlags);
2536 IEM_MC_REF_MXCSR(pfMxcsr);
2537 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2538 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2539 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2540 pfMxcsr, pEFlags, puSrc1, puSrc2);
2541 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2542 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2543 } IEM_MC_ELSE() {
2544 IEM_MC_COMMIT_EFLAGS(fEFlags);
2545 } IEM_MC_ENDIF();
2546
2547 IEM_MC_ADVANCE_RIP_AND_FINISH();
2548 IEM_MC_END();
2549 }
2550 else
2551 {
2552 /*
2553 * Register, memory.
2554 */
2555 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2556 IEM_MC_LOCAL(uint32_t, fEFlags);
2557 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2558 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2559 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2560 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2561 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2562 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2563
2564 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2565 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2566 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2567 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2568
2569 IEM_MC_PREPARE_AVX_USAGE();
2570 IEM_MC_FETCH_EFLAGS(fEFlags);
2571 IEM_MC_REF_MXCSR(pfMxcsr);
2572 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2573 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2574 pfMxcsr, pEFlags, puSrc1, puSrc2);
2575 IEM_MC_IF_MXCSR_XCPT_PENDING() {
2576 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2577 } IEM_MC_ELSE() {
2578 IEM_MC_COMMIT_EFLAGS(fEFlags);
2579 } IEM_MC_ENDIF();
2580
2581 IEM_MC_ADVANCE_RIP_AND_FINISH();
2582 IEM_MC_END();
2583 }
2584}
2585
2586
2587/* Opcode VEX.F3.0F 0x2f - invalid */
2588/* Opcode VEX.F2.0F 0x2f - invalid */
2589
2590/* Opcode VEX.0F 0x30 - invalid */
2591/* Opcode VEX.0F 0x31 - invalid */
2592/* Opcode VEX.0F 0x32 - invalid */
2593/* Opcode VEX.0F 0x33 - invalid */
2594/* Opcode VEX.0F 0x34 - invalid */
2595/* Opcode VEX.0F 0x35 - invalid */
2596/* Opcode VEX.0F 0x36 - invalid */
2597/* Opcode VEX.0F 0x37 - invalid */
2598/* Opcode VEX.0F 0x38 - invalid */
2599/* Opcode VEX.0F 0x39 - invalid */
2600/* Opcode VEX.0F 0x3a - invalid */
2601/* Opcode VEX.0F 0x3b - invalid */
2602/* Opcode VEX.0F 0x3c - invalid */
2603/* Opcode VEX.0F 0x3d - invalid */
2604/* Opcode VEX.0F 0x3e - invalid */
2605/* Opcode VEX.0F 0x3f - invalid */
2606/* Opcode VEX.0F 0x40 - invalid */
2607/* Opcode VEX.0F 0x41 - invalid */
2608/* Opcode VEX.0F 0x42 - invalid */
2609/* Opcode VEX.0F 0x43 - invalid */
2610/* Opcode VEX.0F 0x44 - invalid */
2611/* Opcode VEX.0F 0x45 - invalid */
2612/* Opcode VEX.0F 0x46 - invalid */
2613/* Opcode VEX.0F 0x47 - invalid */
2614/* Opcode VEX.0F 0x48 - invalid */
2615/* Opcode VEX.0F 0x49 - invalid */
2616/* Opcode VEX.0F 0x4a - invalid */
2617/* Opcode VEX.0F 0x4b - invalid */
2618/* Opcode VEX.0F 0x4c - invalid */
2619/* Opcode VEX.0F 0x4d - invalid */
2620/* Opcode VEX.0F 0x4e - invalid */
2621/* Opcode VEX.0F 0x4f - invalid */
2622
2623
2624/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2625FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2626{
2627 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2629 if (IEM_IS_MODRM_REG_MODE(bRm))
2630 {
2631 /*
2632 * Register, register.
2633 */
2634 if (pVCpu->iem.s.uVexLength == 0)
2635 {
2636 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2637 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2638 IEM_MC_LOCAL(uint8_t, u8Dst);
2639 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2640 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2641 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2642 IEM_MC_PREPARE_AVX_USAGE();
2643 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2644 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2645 pu8Dst, puSrc);
2646 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2647 IEM_MC_ADVANCE_RIP_AND_FINISH();
2648 IEM_MC_END();
2649 }
2650 else
2651 {
2652 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2653 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2654 IEM_MC_LOCAL(uint8_t, u8Dst);
2655 IEM_MC_LOCAL(RTUINT256U, uSrc);
2656 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2657 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2658
2659 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2660 IEM_MC_PREPARE_AVX_USAGE();
2661 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2662 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2663 pu8Dst, puSrc);
2664 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2665 IEM_MC_ADVANCE_RIP_AND_FINISH();
2666 IEM_MC_END();
2667 }
2668 }
2669 /* No memory operand. */
2670 else
2671 IEMOP_RAISE_INVALID_OPCODE_RET();
2672}
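/*
 * Editor's sketch, not part of the original file: vmovmskps gathers the
 * sign bit of each packed single into the low bits of a GPR, which is what
 * the u8Dst result stored with IEM_MC_STORE_GREG_U32 above holds.
 * Hypothetical 128-bit helper:
 */
#if 0
static uint8_t sketchMovMskPs128(PCRTUINT128U puSrc)
{
    uint8_t bMask = 0;
    for (unsigned i = 0; i < 4; i++)
        bMask |= (uint8_t)((puSrc->au32[i] >> 31) << i); /* collect sign bits */
    return bMask;
}
#endif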
2673
2674
2675/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2676FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2677{
2678 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2680 if (IEM_IS_MODRM_REG_MODE(bRm))
2681 {
2682 /*
2683 * Register, register.
2684 */
2685 if (pVCpu->iem.s.uVexLength == 0)
2686 {
2687 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2688 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2689 IEM_MC_LOCAL(uint8_t, u8Dst);
2690 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2691 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2692 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2693 IEM_MC_PREPARE_AVX_USAGE();
2694 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2695 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2696 pu8Dst, puSrc);
2697 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2698 IEM_MC_ADVANCE_RIP_AND_FINISH();
2699 IEM_MC_END();
2700 }
2701 else
2702 {
2703 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2704 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2705 IEM_MC_LOCAL(uint8_t, u8Dst);
2706 IEM_MC_LOCAL(RTUINT256U, uSrc);
2707 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2708 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2709
2710 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2711 IEM_MC_PREPARE_AVX_USAGE();
2712 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2713 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2714 pu8Dst, puSrc);
2715 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2716 IEM_MC_ADVANCE_RIP_AND_FINISH();
2717 IEM_MC_END();
2718 }
2719 }
2720 /* No memory operand. */
2721 else
2722 IEMOP_RAISE_INVALID_OPCODE_RET();
2723}
2724
2725
2726/* Opcode VEX.F3.0F 0x50 - invalid */
2727/* Opcode VEX.F2.0F 0x50 - invalid */
2728
2729/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2730FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2731/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2732FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2733/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2734FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2735/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2736FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2737
2738/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2739FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2740/* Opcode VEX.66.0F 0x52 - invalid */
2741/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2742FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2743/* Opcode VEX.F2.0F 0x52 - invalid */
2744
2745/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2746FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2747/* Opcode VEX.66.0F 0x53 - invalid */
2748/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2749FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2750/* Opcode VEX.F2.0F 0x53 - invalid */
2751
2752
2753/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2754FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2755{
2756 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2757 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2758 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2759}
2760
2761
2762/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2763FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2764{
2765 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2766 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2767 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2768}
2769
2770
2771/* Opcode VEX.F3.0F 0x54 - invalid */
2772/* Opcode VEX.F2.0F 0x54 - invalid */
2773
2774
2775/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2776FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2777{
2778 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2779 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2780 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2781}
2782
2783
2784/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2785FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2786{
2787 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2788 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2789 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2790}
2791
2792
2793/* Opcode VEX.F3.0F 0x55 - invalid */
2794/* Opcode VEX.F2.0F 0x55 - invalid */
2795
2796/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2797FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2798{
2799 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2800 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2801 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2802}
2803
2804
2805/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2806FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2807{
2808 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2809 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2810 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2811}
2812
2813
2814/* Opcode VEX.F3.0F 0x56 - invalid */
2815/* Opcode VEX.F2.0F 0x56 - invalid */
2816
2817
2818/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2819FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2820{
2821 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2822 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2823 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2824}
2825
2826
2827/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2828FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2829{
2830 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2831 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2832 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2833}
2834
2835
2836/* Opcode VEX.F3.0F 0x57 - invalid */
2837/* Opcode VEX.F2.0F 0x57 - invalid */
2838
2839/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2840FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2841/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2842FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2843/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2844FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2845/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2846FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2847
2848/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2849FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2850/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2851FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2852/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2853FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2854/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2855FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2856
2857/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2858FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2859/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2860FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2861/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2862FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2863/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2864FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2865
2866/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2867FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2868/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2869FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2870/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2871FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2872/* Opcode VEX.F2.0F 0x5b - invalid */
2873
2874/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2875FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2876/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2877FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2878/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2879FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2880/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2881FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2882
2883/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2884FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2885/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2886FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2887/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2888FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2889/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2890FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2891
2892/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2893FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2894/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2895FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2896/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2897FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2898/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2899FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2900
2901/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2902FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2903/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2904FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2905/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2906FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2907/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2908FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2909
2910
2911/* Opcode VEX.0F 0x60 - invalid */
2912
2913
2914/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2915FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2916{
2917 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2918 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2919 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2920}
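/*
 * Editor's sketch, not part of the original file: vpunpcklbw interleaves
 * the low eight bytes of the two sources in each 128-bit lane; the
 * _HighSrc workers used by opcodes 0x68..0x6d below mirror this for the
 * high halves and the wider element sizes. Hypothetical per-lane helper
 * (assumes non-overlapping buffers):
 */
#if 0
static void sketchPUnpckLBw128(uint8_t abDst[16], const uint8_t abSrc1[16], const uint8_t abSrc2[16])
{
    for (unsigned i = 0; i < 8; i++)
    {
        abDst[i * 2]     = abSrc1[i];
        abDst[i * 2 + 1] = abSrc2[i];
    }
}
#endif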
2921
2922
2923/* Opcode VEX.F3.0F 0x60 - invalid */
2924
2925
2926/* Opcode VEX.0F 0x61 - invalid */
2927
2928
2929/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2930FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2931{
2932 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2933 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2934 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2935}
2936
2937
2938/* Opcode VEX.F3.0F 0x61 - invalid */
2939
2940
2941/* Opcode VEX.0F 0x62 - invalid */
2942
2943/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2944FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2945{
2946 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2947 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2948 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2949}
2950
2951
2952/* Opcode VEX.F3.0F 0x62 - invalid */
2953
2954
2955
2956/* Opcode VEX.0F 0x63 - invalid */
2957
2958
2959/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2960FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2961{
2962 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2963 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2964 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2965}
2966
2967
2968/* Opcode VEX.F3.0F 0x63 - invalid */
2969
2970/* Opcode VEX.0F 0x64 - invalid */
2971
2972
2973/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2974FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2975{
2976 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2977 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2978 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2979}
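/*
 * Editor's sketch, not part of the original file: vpcmpgtb is a signed
 * per-byte greater-than compare producing all-ones/all-zeros masks.
 * Hypothetical per-lane helper:
 */
#if 0
static void sketchPCmpGtB128(uint8_t abDst[16], const int8_t abSrc1[16], const int8_t abSrc2[16])
{
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = abSrc1[i] > abSrc2[i] ? 0xff : 0x00;
}
#endif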
2980
2981
2982/* Opcode VEX.F3.0F 0x64 - invalid */
2983
2984/* Opcode VEX.0F 0x65 - invalid */
2985
2986
2987/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2988FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2989{
2990 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2991 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
2992 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2993}
2994
2995
2996/* Opcode VEX.F3.0F 0x65 - invalid */
2997
2998/* Opcode VEX.0F 0x66 - invalid */
2999
3000
3001/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3002FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3003{
3004 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3005 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
3006 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3007}
3008
3009
3010/* Opcode VEX.F3.0F 0x66 - invalid */
3011
3012/* Opcode VEX.0F 0x67 - invalid */
3013
3014
3015/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, W */
3016FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3017{
3018 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3019 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3020 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3021}
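/*
 * Editor's sketch, not part of the original file: vpackuswb narrows signed
 * words to bytes with unsigned saturation, clamping to [0,255]. The
 * per-element rule, as a hypothetical helper:
 */
#if 0
static uint8_t sketchSatI16ToU8(int16_t i16)
{
    return i16 < 0 ? 0 : i16 > 255 ? 255 : (uint8_t)i16;
}
#endif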
3022
3023
3024/* Opcode VEX.F3.0F 0x67 - invalid */
3025
3026
3027///**
3028// * Common worker for SSE2 instructions on the form:
3029// * pxxxx xmm1, xmm2/mem128
3030// *
3031// * The 2nd operand is the second half of a register, which in the memory case
3032// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3033// * where it may read the full 128 bits or only the upper 64 bits.
3034// *
3035// * Exceptions type 4.
3036// */
3037//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3038//{
3039// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3040// if (IEM_IS_MODRM_REG_MODE(bRm))
3041// {
3042// /*
3043// * Register, register.
3044// */
3045// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3046// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3047// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3048// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3049// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3050// IEM_MC_PREPARE_SSE_USAGE();
3051// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3052// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3053// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3054// IEM_MC_ADVANCE_RIP_AND_FINISH();
3055// IEM_MC_END();
3056// }
3057// else
3058// {
3059// /*
3060// * Register, memory.
3061// */
3062// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3063// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3064// IEM_MC_LOCAL(RTUINT128U, uSrc);
3065// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3066// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3067//
3068// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3069// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3070// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3071// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3072//
3073// IEM_MC_PREPARE_SSE_USAGE();
3074// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3075// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3076//
3077// IEM_MC_ADVANCE_RIP_AND_FINISH();
3078// IEM_MC_END();
3079// }
3080// return VINF_SUCCESS;
3081//}
3082
3083
3084/* Opcode VEX.0F 0x68 - invalid */
3085
3086/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3087FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3088{
3089 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3090 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3091 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3092}
3093
3094
3095/* Opcode VEX.F3.0F 0x68 - invalid */
3096
3097
3098/* Opcode VEX.0F 0x69 - invalid */
3099
3100
3101/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3102FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3103{
3104 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3105 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3106 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3107}
3108
3109
3110/* Opcode VEX.F3.0F 0x69 - invalid */
3111
3112
3113/* Opcode VEX.0F 0x6a - invalid */
3114
3115
3116/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, W */
3117FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3118{
3119 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3120 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3121 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3122}
3123
3124
3125/* Opcode VEX.F3.0F 0x6a - invalid */
3126
3127
3128/* Opcode VEX.0F 0x6b - invalid */
3129
3130
3131/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3132FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3133{
3134 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3135 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3136 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3137}
3138
3139
3140/* Opcode VEX.F3.0F 0x6b - invalid */
3141
3142
3143/* Opcode VEX.0F 0x6c - invalid */
3144
3145
3146/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3147FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3148{
3149 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3150 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3151 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3152}
3153
3154
3155/* Opcode VEX.F3.0F 0x6c - invalid */
3156/* Opcode VEX.F2.0F 0x6c - invalid */
3157
3158
3159/* Opcode VEX.0F 0x6d - invalid */
3160
3161
3162/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, W */
3163FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3164{
3165 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3166 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3167 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3168}
3169
3170
3171/* Opcode VEX.F3.0F 0x6d - invalid */
3172
3173
3174/* Opcode VEX.0F 0x6e - invalid */
3175
3176FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3177{
3178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3179 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3180 {
3181 /**
3182 * @opcode 0x6e
3183 * @opcodesub rex.w=1
3184 * @oppfx 0x66
3185 * @opcpuid avx
3186 * @opgroup og_avx_simdint_datamove
3187 * @opxcpttype 5
3188 * @optest 64-bit / op1=1 op2=2 -> op1=2
3189 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3190 */
3191 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3192 if (IEM_IS_MODRM_REG_MODE(bRm))
3193 {
3194 /* XMM, greg64 */
3195 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3196 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3197 IEM_MC_LOCAL(uint64_t, u64Tmp);
3198
3199 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3200 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3201
3202 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3203 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3204
3205 IEM_MC_ADVANCE_RIP_AND_FINISH();
3206 IEM_MC_END();
3207 }
3208 else
3209 {
3210 /* XMM, [mem64] */
3211 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3212 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3213 IEM_MC_LOCAL(uint64_t, u64Tmp);
3214
3215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3216 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3217 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3218 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3219
3220 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3221 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3222
3223 IEM_MC_ADVANCE_RIP_AND_FINISH();
3224 IEM_MC_END();
3225 }
3226 }
3227 else
3228 {
3229 /**
3230 * @opdone
3231 * @opcode 0x6e
3232 * @opcodesub rex.w=0
3233 * @oppfx 0x66
3234 * @opcpuid avx
3235 * @opgroup og_avx_simdint_datamove
3236 * @opxcpttype 5
3237 * @opfunction iemOp_vmovd_q_Vy_Ey
3238 * @optest op1=1 op2=2 -> op1=2
3239 * @optest op1=0 op2=-42 -> op1=-42
3240 */
3241 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3242 if (IEM_IS_MODRM_REG_MODE(bRm))
3243 {
3244 /* XMM, greg32 */
3245 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3246 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3247 IEM_MC_LOCAL(uint32_t, u32Tmp);
3248
3249 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3250 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3251
3252 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3253 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3254
3255 IEM_MC_ADVANCE_RIP_AND_FINISH();
3256 IEM_MC_END();
3257 }
3258 else
3259 {
3260 /* XMM, [mem32] */
3261 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3263 IEM_MC_LOCAL(uint32_t, u32Tmp);
3264
3265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3266 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3267 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3268 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3269
3270 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3271 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3272
3273 IEM_MC_ADVANCE_RIP_AND_FINISH();
3274 IEM_MC_END();
3275 }
3276 }
3277}
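/*
 * Editor's note, not part of the original file: the IEM_OP_PRF_SIZE_REX_W
 * check above reflects VEX.W, selecting the 64-bit vmovq form over the
 * 32-bit vmovd form; either way the value is zero-extended through VLMAX
 * by the _ZX_VLMAX stores. Hypothetical sketch of the W=0 case:
 */
#if 0
static void sketchVMovD(PRTUINT256U puYmmDst, uint32_t u32Src)
{
    puYmmDst->au64[0] = u32Src; /* zero-extends into qword 0 */
    puYmmDst->au64[1] = 0;
    puYmmDst->au64[2] = 0;
    puYmmDst->au64[3] = 0;
}
#endif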
3278
3279
3280/* Opcode VEX.F3.0F 0x6e - invalid */
3281
3282
3283/* Opcode VEX.0F 0x6f - invalid */
3284
3285/**
3286 * @opcode 0x6f
3287 * @oppfx 0x66
3288 * @opcpuid avx
3289 * @opgroup og_avx_simdint_datamove
3290 * @opxcpttype 1
3291 * @optest op1=1 op2=2 -> op1=2
3292 * @optest op1=0 op2=-42 -> op1=-42
3293 */
3294FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3295{
3296 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3297 Assert(pVCpu->iem.s.uVexLength <= 1);
3298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3299 if (IEM_IS_MODRM_REG_MODE(bRm))
3300 {
3301 /*
3302 * Register, register.
3303 */
3304 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3305 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3306
3307 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3308 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3309 if (pVCpu->iem.s.uVexLength == 0)
3310 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3311 IEM_GET_MODRM_RM(pVCpu, bRm));
3312 else
3313 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3314 IEM_GET_MODRM_RM(pVCpu, bRm));
3315 IEM_MC_ADVANCE_RIP_AND_FINISH();
3316 IEM_MC_END();
3317 }
3318 else if (pVCpu->iem.s.uVexLength == 0)
3319 {
3320 /*
3321 * Register, memory128.
3322 */
3323 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3324 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3326
3327 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3328 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3329 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3330 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3331
3332 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3333 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3334
3335 IEM_MC_ADVANCE_RIP_AND_FINISH();
3336 IEM_MC_END();
3337 }
3338 else
3339 {
3340 /*
3341 * Register, memory256.
3342 */
3343 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3344 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3346
3347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3348 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3349 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3350 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3351
3352 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3353 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3354
3355 IEM_MC_ADVANCE_RIP_AND_FINISH();
3356 IEM_MC_END();
3357 }
3358}
3359
3360/**
3361 * @opcode 0x6f
3362 * @oppfx 0xf3
3363 * @opcpuid avx
3364 * @opgroup og_avx_simdint_datamove
3365 * @opxcpttype 4UA
3366 * @optest op1=1 op2=2 -> op1=2
3367 * @optest op1=0 op2=-42 -> op1=-42
3368 */
3369FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3370{
3371 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3372 Assert(pVCpu->iem.s.uVexLength <= 1);
3373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3374 if (IEM_IS_MODRM_REG_MODE(bRm))
3375 {
3376 /*
3377 * Register, register.
3378 */
3379 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3380 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3381
3382 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3383 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3384 if (pVCpu->iem.s.uVexLength == 0)
3385 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3386 IEM_GET_MODRM_RM(pVCpu, bRm));
3387 else
3388 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3389 IEM_GET_MODRM_RM(pVCpu, bRm));
3390 IEM_MC_ADVANCE_RIP_AND_FINISH();
3391 IEM_MC_END();
3392 }
3393 else if (pVCpu->iem.s.uVexLength == 0)
3394 {
3395 /*
3396 * Register, memory128.
3397 */
3398 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3399 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3401
3402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3403 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3404 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3405 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3406
3407 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3408 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3409
3410 IEM_MC_ADVANCE_RIP_AND_FINISH();
3411 IEM_MC_END();
3412 }
3413 else
3414 {
3415 /*
3416 * Register, memory256.
3417 */
3418 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3419 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3421
3422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3423 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3424 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3425 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3426
3427 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3428 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3429
3430 IEM_MC_ADVANCE_RIP_AND_FINISH();
3431 IEM_MC_END();
3432 }
3433}
3434
3435
3436/* Opcode VEX.0F 0x70 - invalid */
3437
3438
3439/**
3440 * Common worker for AVX/AVX2 instructions on the forms:
3441 * - vpxxx xmm0, xmm2/mem128, imm8
3442 * - vpxxx ymm0, ymm2/mem256, imm8
3443 *
3444 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3445 */
3446FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3447{
3448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3449 if (IEM_IS_MODRM_REG_MODE(bRm))
3450 {
3451 /*
3452 * Register, register.
3453 */
3454 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3455 if (pVCpu->iem.s.uVexLength)
3456 {
3457 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3458 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3459 IEM_MC_LOCAL(RTUINT256U, uDst);
3460 IEM_MC_LOCAL(RTUINT256U, uSrc);
3461 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3462 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3463 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3464 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3465 IEM_MC_PREPARE_AVX_USAGE();
3466 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3467 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3468 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3469 IEM_MC_ADVANCE_RIP_AND_FINISH();
3470 IEM_MC_END();
3471 }
3472 else
3473 {
3474 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3475 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3476 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3477 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3478 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3479 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3480 IEM_MC_PREPARE_AVX_USAGE();
3481 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3482 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3483 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3484 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3485 IEM_MC_ADVANCE_RIP_AND_FINISH();
3486 IEM_MC_END();
3487 }
3488 }
3489 else
3490 {
3491 /*
3492 * Register, memory.
3493 */
3494 if (pVCpu->iem.s.uVexLength)
3495 {
3496 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3497 IEM_MC_LOCAL(RTUINT256U, uDst);
3498 IEM_MC_LOCAL(RTUINT256U, uSrc);
3499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3500 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3501 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3502
3503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3504 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3505 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3506 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3507 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3508 IEM_MC_PREPARE_AVX_USAGE();
3509
3510 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3511 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3512 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3513
3514 IEM_MC_ADVANCE_RIP_AND_FINISH();
3515 IEM_MC_END();
3516 }
3517 else
3518 {
3519 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3520 IEM_MC_LOCAL(RTUINT128U, uSrc);
3521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3522 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3523 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3524
3525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3526 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3527 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3528 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3529 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3530 IEM_MC_PREPARE_AVX_USAGE();
3531
3532 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3533 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3534 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3535 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3536
3537 IEM_MC_ADVANCE_RIP_AND_FINISH();
3538 IEM_MC_END();
3539 }
3540 }
3541}
3542
3543
3544/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3545FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3546{
3547 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3548 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3549 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3551}
3552
3553
3554/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3555FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3556{
3557 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3558 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3559 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3561}
3562
3563
3564/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3565FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3566{
3567 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3568 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3569 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3570}
3571
3572
3573/**
3574 * Common worker(s) for AVX/AVX2 instructions on the forms:
3575 * - vpxxx xmm0, xmm2, imm8
3576 * - vpxxx ymm0, ymm2, imm8
3577 *
3578 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3579 */
3580FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
3581{
3582 if (IEM_IS_MODRM_REG_MODE(bRm))
3583 {
3584 /*
3585 * Register, register.
3586 */
3587 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3588 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3589 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3590 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3591 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3592 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3593 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3594 IEM_MC_PREPARE_AVX_USAGE();
3595 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3596 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3597 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3598 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
3599 IEM_MC_ADVANCE_RIP_AND_FINISH();
3600 IEM_MC_END();
3601 }
3602 /* No memory operand. */
3603 else
3604 IEMOP_RAISE_INVALID_OPCODE_RET();
3605}
3606
3607FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3608{
3609 if (IEM_IS_MODRM_REG_MODE(bRm))
3610 {
3611 /*
3612 * Register, register.
3613 */
3614 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3615 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3616 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3617 IEM_MC_LOCAL(RTUINT256U, uDst);
3618 IEM_MC_LOCAL(RTUINT256U, uSrc);
3619 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3620 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3621 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3623 IEM_MC_PREPARE_AVX_USAGE();
3624 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3625 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3626 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
3627 IEM_MC_ADVANCE_RIP_AND_FINISH();
3628 IEM_MC_END();
3629 }
3630 /* No memory operand. */
3631 else
3632 IEMOP_RAISE_INVALID_OPCODE_RET();
3633}
3634
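/* The two register-only workers above back the immediate shift encodings of
   VEX groups 12, 13 and 14 (opcodes 0x71, 0x72 and 0x73) below. */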
3635
3636/* Opcode VEX.0F 0x71 11/2 - invalid. */
3637/** Opcode VEX.66.0F 0x71 11/2. */
3638FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
3639{
3640 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3641 if (pVCpu->iem.s.uVexLength)
3642 {
3643 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3644 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
3645 }
3646 else
3647 {
3648 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3649 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
3650 }
3651}
3652
3653
3654/* Opcode VEX.0F 0x71 11/4 - invalid */
3655/** Opcode VEX.66.0F 0x71 11/4. */
3656FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
3657{
3658 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3659 if (pVCpu->iem.s.uVexLength)
3660 {
3661 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3662 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
3663 }
3664 else
3665 {
3666 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3667 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
3668 }
3669}
3670
3671/* Opcode VEX.0F 0x71 11/6 - invalid */
3672
3673/** Opcode VEX.66.0F 0x71 11/6. */
3674FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
3675{
3676 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3677 if (pVCpu->iem.s.uVexLength)
3678 {
3679 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3680 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
3681 }
3682 else
3683 {
3684 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3685 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
3686 }
3687}
3688
3689
3690/**
3691 * VEX Group 12 jump table for register variant.
3692 */
3693IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3694{
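 /* pfx: none,                       066h,                       0f3h,                   0f2h */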
3695 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3696 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3697 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3698 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3699 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3700 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3701 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3702 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3703};
3704AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
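/* These VEX group tables are indexed by ModRM.reg * 4 + idxPrefix, with the
   SIMD prefix encoded as none=0, 066h=1, 0f3h=2, 0f2h=3.  VEX.66.0F 0x71 /2
   (vpsrlw), for instance, lands on entry 2*4 + 1 above. */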
3705
3706
3707/** Opcode VEX.0F 0x71. */
3708FNIEMOP_DEF(iemOp_VGrp12)
3709{
3710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3711 if (IEM_IS_MODRM_REG_MODE(bRm))
3712 /* register, register */
3713 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3714 + pVCpu->iem.s.idxPrefix], bRm);
3715 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3716}
3717
3718
3719/* Opcode VEX.0F 0x72 11/2 - invalid. */
3720/** Opcode VEX.66.0F 0x72 11/2. */
3721FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
3722{
3723 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3724 if (pVCpu->iem.s.uVexLength)
3725 {
3726 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3727 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
3728 }
3729 else
3730 {
3731 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3732 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
3733 }
3734}
3735
3736
3737/* Opcode VEX.0F 0x72 11/4 - invalid. */
3738/** Opcode VEX.66.0F 0x72 11/4. */
3739FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
3740{
3741 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3742 if (pVCpu->iem.s.uVexLength)
3743 {
3744 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3745 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
3746 }
3747 else
3748 {
3749 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3750 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
3751 }
3752}
3753
3754/* Opcode VEX.0F 0x72 11/6 - invalid. */
3755
3756/** Opcode VEX.66.0F 0x72 11/6. */
3757FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
3758{
3759 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3760 if (pVCpu->iem.s.uVexLength)
3761 {
3762 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3763 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
3764 }
3765 else
3766 {
3767 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3768 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
3769 }
3770}
3771
3772
3773/**
3774 * VEX Group 13 jump table for register variant.
3775 */
3776IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3777{
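 /* pfx: none,                       066h,                       0f3h,                   0f2h */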
3778 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3779 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3780 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3781 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3782 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3783 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3784 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3785 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3786};
3787AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3788
3789/** Opcode VEX.0F 0x72. */
3790FNIEMOP_DEF(iemOp_VGrp13)
3791{
3792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3793 if (IEM_IS_MODRM_REG_MODE(bRm))
3794 /* register, register */
3795 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3796 + pVCpu->iem.s.idxPrefix], bRm);
3797 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3798}
3799
3800
3801/* Opcode VEX.0F 0x73 11/2 - invalid. */
3802/** Opcode VEX.66.0F 0x73 11/2. */
3803FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
3804{
3805 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3806 if (pVCpu->iem.s.uVexLength)
3807 {
3808 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3809 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
3810 }
3811 else
3812 {
3813 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3814 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
3815 }
3816}
3817
3818
3819/** Opcode VEX.66.0F 0x73 11/3. */
3820FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3821
3822/* Opcode VEX.0F 0x73 11/6 - invalid. */
3823
3824/** Opcode VEX.66.0F 0x73 11/6. */
3825FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
3826{
3827 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3828 if (pVCpu->iem.s.uVexLength)
3829 {
3830 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3831 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
3832 }
3833 else
3834 {
3835 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3836 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
3837 }
3838}
3839
3840/** Opcode VEX.66.0F 0x73 11/7. */
3841FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3842
3843/**
3844 * VEX Group 14 jump table for register variant.
3845 */
3846IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3847{
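 /* pfx: none,                       066h,                       0f3h,                   0f2h */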
3848 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3849 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3850 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3851 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3852 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3853 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3854 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3855 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3856};
3857AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3858
3859
3860/** Opcode VEX.0F 0x73. */
3861FNIEMOP_DEF(iemOp_VGrp14)
3862{
3863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3864 if (IEM_IS_MODRM_REG_MODE(bRm))
3865 /* register, register */
3866 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3867 + pVCpu->iem.s.idxPrefix], bRm);
3868 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3869}
3870
3871
3872/* Opcode VEX.0F 0x74 - invalid */
3873
3874
3875/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3876FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3877{
3878 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3879 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
3880 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3881}
3882
3883/* Opcode VEX.F3.0F 0x74 - invalid */
3884/* Opcode VEX.F2.0F 0x74 - invalid */
3885
3886
3887/* Opcode VEX.0F 0x75 - invalid */
3888
3889
3890/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3891FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3892{
3893 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3894 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3895 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3896}
3897
3898
3899/* Opcode VEX.F3.0F 0x75 - invalid */
3900/* Opcode VEX.F2.0F 0x75 - invalid */
3901
3902
3903/* Opcode VEX.0F 0x76 - invalid */
3904
3905
3906/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3907FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3908{
3909 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3910 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3911 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3912}
3913
3914
3915/* Opcode VEX.F3.0F 0x76 - invalid */
3916/* Opcode VEX.F2.0F 0x76 - invalid */
3917
3918
3919/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
3920FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
3921{
3922 Assert(pVCpu->iem.s.uVexLength <= 1);
3923 if (pVCpu->iem.s.uVexLength == 0)
3924 {
3925 /*
3926 * 128-bit: vzeroupper
3927 */
3928 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
3929 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3930
3931 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3932 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3933 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3934
3935 IEM_MC_CLEAR_YREG_128_UP(0);
3936 IEM_MC_CLEAR_YREG_128_UP(1);
3937 IEM_MC_CLEAR_YREG_128_UP(2);
3938 IEM_MC_CLEAR_YREG_128_UP(3);
3939 IEM_MC_CLEAR_YREG_128_UP(4);
3940 IEM_MC_CLEAR_YREG_128_UP(5);
3941 IEM_MC_CLEAR_YREG_128_UP(6);
3942 IEM_MC_CLEAR_YREG_128_UP(7);
3943
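 /* Only YMM0 thru YMM7 are architecturally visible outside 64-bit mode; in
    64-bit mode the upper halves of YMM8 thru YMM15 must be cleared as well. */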
3944 if (IEM_IS_64BIT_CODE(pVCpu))
3945 {
3946 IEM_MC_CLEAR_YREG_128_UP( 8);
3947 IEM_MC_CLEAR_YREG_128_UP( 9);
3948 IEM_MC_CLEAR_YREG_128_UP(10);
3949 IEM_MC_CLEAR_YREG_128_UP(11);
3950 IEM_MC_CLEAR_YREG_128_UP(12);
3951 IEM_MC_CLEAR_YREG_128_UP(13);
3952 IEM_MC_CLEAR_YREG_128_UP(14);
3953 IEM_MC_CLEAR_YREG_128_UP(15);
3954 }
3955
3956 IEM_MC_ADVANCE_RIP_AND_FINISH();
3957 IEM_MC_END();
3958 }
3959 else
3960 {
3961 /*
3962 * 256-bit: vzeroall
3963 */
3964 IEMOP_MNEMONIC(vzeroall, "vzeroall");
3965 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3966
3967 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3968 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3969 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3970
3971 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
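 /* Storing a 32-bit zero with ZX_VLMAX semantics zero-extends the result to
    the maximum vector length, so a single store fully clears each register. */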
3972 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
3973 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
3974 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
3975 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
3976 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
3977 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
3978 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
3979 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
3980
3981 if (IEM_IS_64BIT_CODE(pVCpu))
3982 {
3983 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
3984 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
3985 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
3986 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
3987 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
3988 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
3989 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
3990 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
3991 }
3992
3993 IEM_MC_ADVANCE_RIP_AND_FINISH();
3994 IEM_MC_END();
3995 }
3996}
3997
3998
3999/* Opcode VEX.66.0F 0x77 - invalid */
4000/* Opcode VEX.F3.0F 0x77 - invalid */
4001/* Opcode VEX.F2.0F 0x77 - invalid */
4002
4003/* Opcode VEX.0F 0x78 - invalid */
4004/* Opcode VEX.66.0F 0x78 - invalid */
4005/* Opcode VEX.F3.0F 0x78 - invalid */
4006/* Opcode VEX.F2.0F 0x78 - invalid */
4007
4008/* Opcode VEX.0F 0x79 - invalid */
4009/* Opcode VEX.66.0F 0x79 - invalid */
4010/* Opcode VEX.F3.0F 0x79 - invalid */
4011/* Opcode VEX.F2.0F 0x79 - invalid */
4012
4013/* Opcode VEX.0F 0x7a - invalid */
4014/* Opcode VEX.66.0F 0x7a - invalid */
4015/* Opcode VEX.F3.0F 0x7a - invalid */
4016/* Opcode VEX.F2.0F 0x7a - invalid */
4017
4018/* Opcode VEX.0F 0x7b - invalid */
4019/* Opcode VEX.66.0F 0x7b - invalid */
4020/* Opcode VEX.F3.0F 0x7b - invalid */
4021/* Opcode VEX.F2.0F 0x7b - invalid */
4022
4023/* Opcode VEX.0F 0x7c - invalid */
4024/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4025FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
4026/* Opcode VEX.F3.0F 0x7c - invalid */
4027/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4028FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
4029
4030/* Opcode VEX.0F 0x7d - invalid */
4031/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4032FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
4033/* Opcode VEX.F3.0F 0x7d - invalid */
4034/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4035FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
4036
4037
4038/* Opcode VEX.0F 0x7e - invalid */
4039
4040FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4041{
4042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4043 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4044 {
4045 /**
4046 * @opcode 0x7e
4047 * @opcodesub rex.w=1
4048 * @oppfx 0x66
4049 * @opcpuid avx
4050 * @opgroup og_avx_simdint_datamove
4051 * @opxcpttype 5
4052 * @optest 64-bit / op1=1 op2=2 -> op1=2
4053 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4054 */
4055 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4056 if (IEM_IS_MODRM_REG_MODE(bRm))
4057 {
4058 /* greg64, XMM */
4059 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4060 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4061 IEM_MC_LOCAL(uint64_t, u64Tmp);
4062
4063 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4064 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4065
4066 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4067 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4068
4069 IEM_MC_ADVANCE_RIP_AND_FINISH();
4070 IEM_MC_END();
4071 }
4072 else
4073 {
4074 /* [mem64], XMM */
4075 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4077 IEM_MC_LOCAL(uint64_t, u64Tmp);
4078
4079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4080 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4083
4084 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4085 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4086
4087 IEM_MC_ADVANCE_RIP_AND_FINISH();
4088 IEM_MC_END();
4089 }
4090 }
4091 else
4092 {
4093 /**
4094 * @opdone
4095 * @opcode 0x7e
4096 * @opcodesub rex.w=0
4097 * @oppfx 0x66
4098 * @opcpuid avx
4099 * @opgroup og_avx_simdint_datamove
4100 * @opxcpttype 5
4101 * @opfunction iemOp_vmovd_q_Ey_Vy
4102 * @optest op1=1 op2=2 -> op1=2
4103 * @optest op1=0 op2=-42 -> op1=-42
4104 */
4105 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4106 if (IEM_IS_MODRM_REG_MODE(bRm))
4107 {
4108 /* greg32, XMM */
4109 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4110 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4111 IEM_MC_LOCAL(uint32_t, u32Tmp);
4112
4113 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4114 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4115
4116 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4117 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4118
4119 IEM_MC_ADVANCE_RIP_AND_FINISH();
4120 IEM_MC_END();
4121 }
4122 else
4123 {
4124 /* [mem32], XMM */
4125 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4127 IEM_MC_LOCAL(uint32_t, u32Tmp);
4128
4129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4130 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4131 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4132 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4133
4134 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4135 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4136
4137 IEM_MC_ADVANCE_RIP_AND_FINISH();
4138 IEM_MC_END();
4139 }
4140 }
4141}
4142
4143
4144/**
4145 * @opcode 0x7e
4146 * @oppfx 0xf3
4147 * @opcpuid avx
4148 * @opgroup og_avx_pcksclr_datamove
4149 * @opxcpttype none
4150 * @optest op1=1 op2=2 -> op1=2
4151 * @optest op1=0 op2=-42 -> op1=-42
4152 */
4153FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4154{
4155 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4157 if (IEM_IS_MODRM_REG_MODE(bRm))
4158 {
4159 /*
4160 * Register, register.
4161 */
4162 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4163 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4164
4165 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4166 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4167
4168 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4169 IEM_GET_MODRM_RM(pVCpu, bRm));
4170 IEM_MC_ADVANCE_RIP_AND_FINISH();
4171 IEM_MC_END();
4172 }
4173 else
4174 {
4175 /*
4176 * Memory, register.
4177 */
4178 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4179 IEM_MC_LOCAL(uint64_t, uSrc);
4180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4181
4182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4183 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4184 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4185 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4186
4187 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4188 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4189
4190 IEM_MC_ADVANCE_RIP_AND_FINISH();
4191 IEM_MC_END();
4192 }
4193}
4194
4195/* Opcode VEX.F2.0F 0x7e - invalid */
4196
4197
4198/* Opcode VEX.0F 0x7f - invalid */
4199
4200/**
4201 * @opcode 0x7f
4202 * @oppfx 0x66
4203 * @opcpuid avx
4204 * @opgroup og_avx_simdint_datamove
4205 * @opxcpttype 1
4206 * @optest op1=1 op2=2 -> op1=2
4207 * @optest op1=0 op2=-42 -> op1=-42
4208 */
4209FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4210{
4211 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4212 Assert(pVCpu->iem.s.uVexLength <= 1);
4213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4214 if (IEM_IS_MODRM_REG_MODE(bRm))
4215 {
4216 /*
4217 * Register, register.
4218 */
4219 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4220 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4221
4222 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4223 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4224 if (pVCpu->iem.s.uVexLength == 0)
4225 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4226 IEM_GET_MODRM_REG(pVCpu, bRm));
4227 else
4228 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4229 IEM_GET_MODRM_REG(pVCpu, bRm));
4230 IEM_MC_ADVANCE_RIP_AND_FINISH();
4231 IEM_MC_END();
4232 }
4233 else if (pVCpu->iem.s.uVexLength == 0)
4234 {
4235 /*
4236 * Register, memory128.
4237 */
4238 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4239 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4241
4242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4243 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4244 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4245 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4246
4247 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4248 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4249
4250 IEM_MC_ADVANCE_RIP_AND_FINISH();
4251 IEM_MC_END();
4252 }
4253 else
4254 {
4255 /*
4256 * Register, memory256.
4257 */
4258 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4259 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4261
4262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4263 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4264 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4265 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4266
4267 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4268 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4269
4270 IEM_MC_ADVANCE_RIP_AND_FINISH();
4271 IEM_MC_END();
4272 }
4273}
4274
4275
4276/**
4277 * @opcode 0x7f
4278 * @oppfx 0xf3
4279 * @opcpuid avx
4280 * @opgroup og_avx_simdint_datamove
4281 * @opxcpttype 4UA
4282 * @optest op1=1 op2=2 -> op1=2
4283 * @optest op1=0 op2=-42 -> op1=-42
4284 */
4285FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4286{
4287 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4288 Assert(pVCpu->iem.s.uVexLength <= 1);
4289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4290 if (IEM_IS_MODRM_REG_MODE(bRm))
4291 {
4292 /*
4293 * Register, register.
4294 */
4295 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4296 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4297
4298 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4299 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4300 if (pVCpu->iem.s.uVexLength == 0)
4301 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4302 IEM_GET_MODRM_REG(pVCpu, bRm));
4303 else
4304 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4305 IEM_GET_MODRM_REG(pVCpu, bRm));
4306 IEM_MC_ADVANCE_RIP_AND_FINISH();
4307 IEM_MC_END();
4308 }
4309 else if (pVCpu->iem.s.uVexLength == 0)
4310 {
4311 /*
4312 * Register, memory128.
4313 */
4314 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4315 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4317
4318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4319 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4320 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4321 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4322
4323 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4324 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4325
4326 IEM_MC_ADVANCE_RIP_AND_FINISH();
4327 IEM_MC_END();
4328 }
4329 else
4330 {
4331 /*
4332 * Register, memory256.
4333 */
4334 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4335 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4337
4338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4339 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4340 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4341 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4342
4343 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4344 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4345
4346 IEM_MC_ADVANCE_RIP_AND_FINISH();
4347 IEM_MC_END();
4348 }
4349}
4350
4351/* Opcode VEX.F2.0F 0x7f - invalid */
4352
4353
4354/* Opcode VEX.0F 0x80 - invalid */
4355/* Opcode VEX.0F 0x81 - invalid */
4356/* Opcode VEX.0F 0x82 - invalid */
4357/* Opcode VEX.0F 0x83 - invalid */
4358/* Opcode VEX.0F 0x84 - invalid */
4359/* Opcode VEX.0F 0x85 - invalid */
4360/* Opcode VEX.0F 0x86 - invalid */
4361/* Opcode VEX.0F 0x87 - invalid */
4362/* Opcode VEX.0F 0x88 - invalid */
4363/* Opcode VEX.0F 0x89 - invalid */
4364/* Opcode VEX.0F 0x8a - invalid */
4365/* Opcode VEX.0F 0x8b - invalid */
4366/* Opcode VEX.0F 0x8c - invalid */
4367/* Opcode VEX.0F 0x8d - invalid */
4368/* Opcode VEX.0F 0x8e - invalid */
4369/* Opcode VEX.0F 0x8f - invalid */
4370/* Opcode VEX.0F 0x90 - invalid */
4371/* Opcode VEX.0F 0x91 - invalid */
4372/* Opcode VEX.0F 0x92 - invalid */
4373/* Opcode VEX.0F 0x93 - invalid */
4374/* Opcode VEX.0F 0x94 - invalid */
4375/* Opcode VEX.0F 0x95 - invalid */
4376/* Opcode VEX.0F 0x96 - invalid */
4377/* Opcode VEX.0F 0x97 - invalid */
4378/* Opcode VEX.0F 0x98 - invalid */
4379/* Opcode VEX.0F 0x99 - invalid */
4380/* Opcode VEX.0F 0x9a - invalid */
4381/* Opcode VEX.0F 0x9b - invalid */
4382/* Opcode VEX.0F 0x9c - invalid */
4383/* Opcode VEX.0F 0x9d - invalid */
4384/* Opcode VEX.0F 0x9e - invalid */
4385/* Opcode VEX.0F 0x9f - invalid */
4386/* Opcode VEX.0F 0xa0 - invalid */
4387/* Opcode VEX.0F 0xa1 - invalid */
4388/* Opcode VEX.0F 0xa2 - invalid */
4389/* Opcode VEX.0F 0xa3 - invalid */
4390/* Opcode VEX.0F 0xa4 - invalid */
4391/* Opcode VEX.0F 0xa5 - invalid */
4392/* Opcode VEX.0F 0xa6 - invalid */
4393/* Opcode VEX.0F 0xa7 - invalid */
4394/* Opcode VEX.0F 0xa8 - invalid */
4395/* Opcode VEX.0F 0xa9 - invalid */
4396/* Opcode VEX.0F 0xaa - invalid */
4397/* Opcode VEX.0F 0xab - invalid */
4398/* Opcode VEX.0F 0xac - invalid */
4399/* Opcode VEX.0F 0xad - invalid */
4400
4401
4402/* Opcode VEX.0F 0xae mem/0 - invalid. */
4403/* Opcode VEX.0F 0xae mem/1 - invalid. */
4404
4405/**
4406 * @ opmaps grp15
4407 * @ opcode !11/2
4408 * @ oppfx none
4409 * @ opcpuid sse
4410 * @ opgroup og_sse_mxcsrsm
4411 * @ opxcpttype 5
4412 * @ optest op1=0 -> mxcsr=0
4413 * @ optest op1=0x2083 -> mxcsr=0x2083
4414 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4415 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4416 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4417 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4418 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4419 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4420 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4421 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4422 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4423 */
4424FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4425//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4426//{
4427// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4428// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4429// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4430// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4431// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4432// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4433// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4434// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4435// IEM_MC_END();
4436// return VINF_SUCCESS;
4437//}
4438
4439
4440/**
4441 * @opmaps vexgrp15
4442 * @opcode !11/3
4443 * @oppfx none
4444 * @opcpuid avx
4445 * @opgroup og_avx_mxcsrsm
4446 * @opxcpttype 5
4447 * @optest mxcsr=0 -> op1=0
4448 * @optest mxcsr=0x2083 -> op1=0x2083
4449 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4450 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4451 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4452 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4453 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4454 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4455 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4456 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4457 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4458 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4459 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4460 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4461 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4462 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4463 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4464 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4465 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4466 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4467 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4468 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4469 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4470 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4471 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4472 * -> value.xcpt=0x6
4473 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4474 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4475 * APMv4 rev 3.17 page 509.
4476 * @todo Test this instruction on AMD Ryzen.
4477 */
4478FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4479{
4480 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4481 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4482 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4484 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4485 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4486 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4487 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4488 IEM_MC_END();
4489}
4490
4491/* Opcode VEX.0F 0xae mem/4 - invalid. */
4492/* Opcode VEX.0F 0xae mem/5 - invalid. */
4493/* Opcode VEX.0F 0xae mem/6 - invalid. */
4494/* Opcode VEX.0F 0xae mem/7 - invalid. */
4495
4496/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4497/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4498/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4499/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4500/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4501/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4502/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4503/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4504
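/* All register (11b) encodings of VEX group 15 are invalid, so the dispatcher
   below only routes the memory forms through the following table. */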
4505/**
4506 * Vex group 15 jump table for memory variant.
4507 */
4508IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4509{ /* pfx: none, 066h, 0f3h, 0f2h */
4510 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4511 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4512 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4513 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4514 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4515 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4516 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4517 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4518};
4519AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4520
4521
4522/** Opcode VEX.0F 0xae. */
4523FNIEMOP_DEF(iemOp_VGrp15)
4524{
4525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4526 if (IEM_IS_MODRM_REG_MODE(bRm))
4527 /* register, register */
4528 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4529
4530 /* memory, register */
4531 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4532 + pVCpu->iem.s.idxPrefix], bRm);
4533}
4534
4535
4536/* Opcode VEX.0F 0xaf - invalid. */
4537
4538/* Opcode VEX.0F 0xb0 - invalid. */
4539/* Opcode VEX.0F 0xb1 - invalid. */
4540/* Opcode VEX.0F 0xb2 - invalid. */
4542/* Opcode VEX.0F 0xb3 - invalid. */
4543/* Opcode VEX.0F 0xb4 - invalid. */
4544/* Opcode VEX.0F 0xb5 - invalid. */
4545/* Opcode VEX.0F 0xb6 - invalid. */
4546/* Opcode VEX.0F 0xb7 - invalid. */
4547/* Opcode VEX.0F 0xb8 - invalid. */
4548/* Opcode VEX.0F 0xb9 - invalid. */
4549/* Opcode VEX.0F 0xba - invalid. */
4550/* Opcode VEX.0F 0xbb - invalid. */
4551/* Opcode VEX.0F 0xbc - invalid. */
4552/* Opcode VEX.0F 0xbd - invalid. */
4553/* Opcode VEX.0F 0xbe - invalid. */
4554/* Opcode VEX.0F 0xbf - invalid. */
4555
4556/* Opcode VEX.0F 0xc0 - invalid. */
4557/* Opcode VEX.66.0F 0xc0 - invalid. */
4558/* Opcode VEX.F3.0F 0xc0 - invalid. */
4559/* Opcode VEX.F2.0F 0xc0 - invalid. */
4560
4561/* Opcode VEX.0F 0xc1 - invalid. */
4562/* Opcode VEX.66.0F 0xc1 - invalid. */
4563/* Opcode VEX.F3.0F 0xc1 - invalid. */
4564/* Opcode VEX.F2.0F 0xc1 - invalid. */
4565
4566/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4567FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4568/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4569FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4570/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4571FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4572/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4573FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4574
4575/* Opcode VEX.0F 0xc3 - invalid */
4576/* Opcode VEX.66.0F 0xc3 - invalid */
4577/* Opcode VEX.F3.0F 0xc3 - invalid */
4578/* Opcode VEX.F2.0F 0xc3 - invalid */
4579
4580/* Opcode VEX.0F 0xc4 - invalid */
4581
4582
4583/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4584FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4585{
4586 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4588 if (IEM_IS_MODRM_REG_MODE(bRm))
4589 {
4590 /*
4591 * Register, register.
4592 */
4593 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4594 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4595 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4596 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4597 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4598 IEM_MC_ARG(uint16_t, u16Src, 2);
4599 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4600 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4601 IEM_MC_PREPARE_AVX_USAGE();
4602 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4603 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu)); /* Hdq */
4604 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4605 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4606 puDst, puSrc, u16Src, bImmArg);
4607 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4608 IEM_MC_ADVANCE_RIP_AND_FINISH();
4609 IEM_MC_END();
4610 }
4611 else
4612 {
4613 /*
4614 * Register, memory.
4615 */
4616 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4618 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4619 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4620 IEM_MC_ARG(uint16_t, u16Src, 2);
4621
4622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4623 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4624 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
4625 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4626 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4627 IEM_MC_PREPARE_AVX_USAGE();
4628
4629 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4630 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4631 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu)); /* Hdq */
4632 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4633 puDst, puSrc, u16Src, bImmArg);
4634 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4635
4636 IEM_MC_ADVANCE_RIP_AND_FINISH();
4637 IEM_MC_END();
4638 }
4639}
4640
4641
4642/* Opcode VEX.F3.0F 0xc4 - invalid */
4643/* Opcode VEX.F2.0F 0xc4 - invalid */
4644
4645/* Opcode VEX.0F 0xc5 - invalid */
4646
4647
4648/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4649FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4650{
4651 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4653 if (IEM_IS_MODRM_REG_MODE(bRm))
4654 {
4655 /*
4656 * greg32, XMM, imm8.
4657 */
4658 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4659 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4660 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4661 IEM_MC_LOCAL(uint16_t, uValue);
4662 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4663 IEM_MC_PREPARE_AVX_USAGE();
4664 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
4665 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
4666 IEM_MC_ADVANCE_RIP_AND_FINISH();
4667 IEM_MC_END();
4668 }
4669 /* No memory operand. */
4670 else
4671 IEMOP_RAISE_INVALID_OPCODE_RET();
4672}
4673
4674
4675/* Opcode VEX.F3.0F 0xc5 - invalid */
4676/* Opcode VEX.F2.0F 0xc5 - invalid */
4677
4678
4679#define VSHUFP_X(a_Instr) \
4680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4681 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4682 { \
4683 /* \
4684 * Register, register. \
4685 */ \
4686 if (pVCpu->iem.s.uVexLength) \
4687 { \
4688 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4689 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4690 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4691 IEM_MC_LOCAL(RTUINT256U, uDst); \
4692 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4693 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4694 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4695 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4696 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4697 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4698 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4699 IEM_MC_PREPARE_AVX_USAGE(); \
4700 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4701 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4702 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4703 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4704 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4705 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4706 IEM_MC_END(); \
4707 } \
4708 else \
4709 { \
4710 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4711 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4712 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4713 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4714 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4715 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4716 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4717 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4718 IEM_MC_PREPARE_AVX_USAGE(); \
4719 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4720 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4721 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4722 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4723 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4724 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4725 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4726 IEM_MC_END(); \
4727 } \
4728 } \
4729 else \
4730 { \
4731 /* \
4732 * Register, memory. \
4733 */ \
4734 if (pVCpu->iem.s.uVexLength) \
4735 { \
4736 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4737 IEM_MC_LOCAL(RTUINT256U, uDst); \
4738 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4739 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4741 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4742 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4743 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4745 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4746 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4747 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4748 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4749 IEM_MC_PREPARE_AVX_USAGE(); \
4750 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4751 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4752 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4753 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4754 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4755 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4756 IEM_MC_END(); \
4757 } \
4758 else \
4759 { \
4760 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4761 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4763 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4764 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4765 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4767 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4768 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4769 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4770 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4771 IEM_MC_PREPARE_AVX_USAGE(); \
4772 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4773 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4774 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4775 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4776 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4777 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4778 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4779 IEM_MC_END(); \
4780 } \
4781 } \
4782 (void)0
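/* Note! The trailing (void)0 makes each VSHUFP_X() invocation below require a
   terminating semicolon, so the expansion reads like an ordinary statement. */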
4783
4784/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4785FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4786{
4787 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4788 VSHUFP_X(vshufps);
4789}
4790
4791
4792/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4793FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4794{
4795 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4796 VSHUFP_X(vshufpd);
4797}
4798#undef VSHUFP_X
4799
4800
4801/* Opcode VEX.F3.0F 0xc6 - invalid */
4802/* Opcode VEX.F2.0F 0xc6 - invalid */
4803
4804/* Opcode VEX.0F 0xc7 - invalid */
4805/* Opcode VEX.66.0F 0xc7 - invalid */
4806/* Opcode VEX.F3.0F 0xc7 - invalid */
4807/* Opcode VEX.F2.0F 0xc7 - invalid */
4808
4809/* Opcode VEX.0F 0xc8 - invalid */
4810/* Opcode VEX.0F 0xc9 - invalid */
4811/* Opcode VEX.0F 0xca - invalid */
4812/* Opcode VEX.0F 0xcb - invalid */
4813/* Opcode VEX.0F 0xcc - invalid */
4814/* Opcode VEX.0F 0xcd - invalid */
4815/* Opcode VEX.0F 0xce - invalid */
4816/* Opcode VEX.0F 0xcf - invalid */
4817
4818
4819/* Opcode VEX.0F 0xd0 - invalid */
4820/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4821FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4822/* Opcode VEX.F3.0F 0xd0 - invalid */
4823/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4824FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4825
4826/* Opcode VEX.0F 0xd1 - invalid */
4827/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
4828FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
4829{
4830 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4831 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
4832 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4833}
4834
4835/* Opcode VEX.F3.0F 0xd1 - invalid */
4836/* Opcode VEX.F2.0F 0xd1 - invalid */
4837
4838/* Opcode VEX.0F 0xd2 - invalid */
4839/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4840FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
4841{
4842 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4843 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
4844 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4845}
4846
4847/* Opcode VEX.F3.0F 0xd2 - invalid */
4848/* Opcode VEX.F2.0F 0xd2 - invalid */
4849
4850/* Opcode VEX.0F 0xd3 - invalid */
4851/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4852FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
4853{
4854 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4855 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
4856 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4857}
4858
4859/* Opcode VEX.F3.0F 0xd3 - invalid */
4860/* Opcode VEX.F2.0F 0xd3 - invalid */
4861
4862/* Opcode VEX.0F 0xd4 - invalid */
4863
4864
4865/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4866FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4867{
4868 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4869 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4870 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4871}
4872
4873
4874/* Opcode VEX.F3.0F 0xd4 - invalid */
4875/* Opcode VEX.F2.0F 0xd4 - invalid */
4876
4877/* Opcode VEX.0F 0xd5 - invalid */
4878
4879
4880/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4881FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4882{
4883 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4884 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4885 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4886}
4887
4888
4889/* Opcode VEX.F3.0F 0xd5 - invalid */
4890/* Opcode VEX.F2.0F 0xd5 - invalid */
4891
4892/* Opcode VEX.0F 0xd6 - invalid */
4893
4894/**
4895 * @opcode 0xd6
4896 * @oppfx 0x66
4897 * @opcpuid avx
4898 * @opgroup og_avx_pcksclr_datamove
4899 * @opxcpttype none
4900 * @optest op1=-1 op2=2 -> op1=2
4901 * @optest op1=0 op2=-42 -> op1=-42
4902 */
4903FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4904{
4905 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4906 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4907 if (IEM_IS_MODRM_REG_MODE(bRm))
4908 {
4909 /*
4910 * Register, register.
4911 */
4912 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4913 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4914
4915 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4916 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4917
4918 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4919 IEM_GET_MODRM_REG(pVCpu, bRm));
4920 IEM_MC_ADVANCE_RIP_AND_FINISH();
4921 IEM_MC_END();
4922 }
4923 else
4924 {
4925 /*
4926 * Memory, register.
4927 */
4928 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4929 IEM_MC_LOCAL(uint64_t, uSrc);
4930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4931
4932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4933 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4934 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4935 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4936
4937 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4938 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4939
4940 IEM_MC_ADVANCE_RIP_AND_FINISH();
4941 IEM_MC_END();
4942 }
4943}
4944
4945/* Opcode VEX.F3.0F 0xd6 - invalid */
4946/* Opcode VEX.F2.0F 0xd6 - invalid */
4947
4948
4949/* Opcode VEX.0F 0xd7 - invalid */
4950
4951/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4952FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4953{
4954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4955 /* Docs say register only. */
4956 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4957 {
4958 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG: the helpers write the full 64-bit register, implicitly zeroing the upper half. */
4959 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
4960 if (pVCpu->iem.s.uVexLength)
4961 {
4962 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4963 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4964 IEM_MC_ARG(uint64_t *, puDst, 0);
4965 IEM_MC_LOCAL(RTUINT256U, uSrc);
4966 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4967 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4968 IEM_MC_PREPARE_AVX_USAGE();
4969 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4970 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4971 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4972 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4973 IEM_MC_ADVANCE_RIP_AND_FINISH();
4974 IEM_MC_END();
4975 }
4976 else
4977 {
4978 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4979 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4980 IEM_MC_ARG(uint64_t *, puDst, 0);
4981 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4982 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4983 IEM_MC_PREPARE_AVX_USAGE();
4984 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4985 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4986 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4987 IEM_MC_ADVANCE_RIP_AND_FINISH();
4988 IEM_MC_END();
4989 }
4990 }
4991 else
4992 IEMOP_RAISE_INVALID_OPCODE_RET();
4993}
4994
4995
4996/* Opcode VEX.F3.0F 0xd7 - invalid */
4997/* Opcode VEX.F2.0F 0xd7 - invalid */
4998
4999
5000/* Opcode VEX.0F 0xd8 - invalid */
5001
5002/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5003FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5004{
5005 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5006 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5007 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5008}
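
/* A reference sketch (#if 0, illustrative only) of the unsigned saturating
   subtraction vpsubusb/vpsubusw apply per lane: results that would go
   negative clamp to zero instead of wrapping. */
#if 0
#include <stdint.h>
static uint8_t subUsbLaneRef(uint8_t uSrc1, uint8_t uSrc2)
{
    return uSrc1 > uSrc2 ? (uint8_t)(uSrc1 - uSrc2) : 0; /* clamp at zero */
}
#endif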
5009
5010
5011/* Opcode VEX.F3.0F 0xd8 - invalid */
5012/* Opcode VEX.F2.0F 0xd8 - invalid */
5013
5014/* Opcode VEX.0F 0xd9 - invalid */
5015
5016
5017/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5018FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5019{
5020 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5021 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5022 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5023}
5024
5025
5026/* Opcode VEX.F3.0F 0xd9 - invalid */
5027/* Opcode VEX.F2.0F 0xd9 - invalid */
5028
5029/* Opcode VEX.0F 0xda - invalid */
5030
5031
5032/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
5033FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
5034{
5035 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5036 IEMOPMEDIAF3_INIT_VARS(vpminub);
5037 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5038}
5039
5040
5041/* Opcode VEX.F3.0F 0xda - invalid */
5042/* Opcode VEX.F2.0F 0xda - invalid */
5043
5044/* Opcode VEX.0F 0xdb - invalid */
5045
5046
5047/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
5048FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
5049{
5050 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5051 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5052 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
5053}
5054
5055
5056/* Opcode VEX.F3.0F 0xdb - invalid */
5057/* Opcode VEX.F2.0F 0xdb - invalid */
5058
5059/* Opcode VEX.0F 0xdc - invalid */
5060
5061
5062/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
5063FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
5064{
5065 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5066 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
5067 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5068}
5069
5070
5071/* Opcode VEX.F3.0F 0xdc - invalid */
5072/* Opcode VEX.F2.0F 0xdc - invalid */
5073
5074/* Opcode VEX.0F 0xdd - invalid */
5075
5076
5077/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
5078FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
5079{
5080 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5081 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
5082 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5083}
5084
5085
5086/* Opcode VEX.F3.0F 0xdd - invalid */
5087/* Opcode VEX.F2.0F 0xdd - invalid */
5088
5089/* Opcode VEX.0F 0xde - invalid */
5090
5091
5092/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
5093FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
5094{
5095 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5096 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
5097 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5098}
5099
5100
5101/* Opcode VEX.F3.0F 0xde - invalid */
5102/* Opcode VEX.F2.0F 0xde - invalid */
5103
5104/* Opcode VEX.0F 0xdf - invalid */
5105
5106
5107/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5108FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5109{
5110 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5111 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5112 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5113}
5114
5115
5116/* Opcode VEX.F3.0F 0xdf - invalid */
5117/* Opcode VEX.F2.0F 0xdf - invalid */
5118
5119/* Opcode VEX.0F 0xe0 - invalid */
5120
5121
5122/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
5123FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
5124{
5125 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5126 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
5127 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5128}
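
/* A reference sketch (#if 0, illustrative only) of the rounded unsigned
   average vpavgb/vpavgw compute per lane: (a + b + 1) >> 1, with the sum
   formed at a wider width so the carry bit is not lost. */
#if 0
#include <stdint.h>
static uint8_t avgbLaneRef(uint8_t uSrc1, uint8_t uSrc2)
{
    return (uint8_t)(((unsigned)uSrc1 + uSrc2 + 1) >> 1);
}
#endif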
5129
5130
5131/* Opcode VEX.F3.0F 0xe0 - invalid */
5132/* Opcode VEX.F2.0F 0xe0 - invalid */
5133
5134/* Opcode VEX.0F 0xe1 - invalid */
5135/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
5136FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
5137{
5138 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5139 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
5140 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5141}
5142
5143/* Opcode VEX.F3.0F 0xe1 - invalid */
5144/* Opcode VEX.F2.0F 0xe1 - invalid */
5145
5146/* Opcode VEX.0F 0xe2 - invalid */
5147/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
5148FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
5149{
5150 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5151 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
5152 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5153}
5154
5155/* Opcode VEX.F3.0F 0xe2 - invalid */
5156/* Opcode VEX.F2.0F 0xe2 - invalid */
5157
5158/* Opcode VEX.0F 0xe3 - invalid */
5159
5160
5161/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
5162FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
5163{
5164 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5165 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
5166 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5167}
5168
5169
5170/* Opcode VEX.F3.0F 0xe3 - invalid */
5171/* Opcode VEX.F2.0F 0xe3 - invalid */
5172
5173/* Opcode VEX.0F 0xe4 - invalid */
5174
5175
5176/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
5177FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
5178{
5179 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5180 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
5181 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5182}
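
/* A reference sketch (#if 0, illustrative only) of vpmulhuw per 16-bit
   lane: the full 32-bit unsigned product is formed and only its high
   16 bits are kept. */
#if 0
#include <stdint.h>
static uint16_t mulhuwLaneRef(uint16_t uSrc1, uint16_t uSrc2)
{
    return (uint16_t)(((uint32_t)uSrc1 * uSrc2) >> 16);
}
#endif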
5183
5184
5185/* Opcode VEX.F3.0F 0xe4 - invalid */
5186/* Opcode VEX.F2.0F 0xe4 - invalid */
5187
5188/* Opcode VEX.0F 0xe5 - invalid */
5189
5190
5191/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
5192FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
5193{
5194 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5195 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
5196 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5197}
5198
5199
5200/* Opcode VEX.F3.0F 0xe5 - invalid */
5201/* Opcode VEX.F2.0F 0xe5 - invalid */
5202
5203/* Opcode VEX.0F 0xe6 - invalid */
5204/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5205FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5206/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5207FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5208/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5209FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5210
5211
5212/* Opcode VEX.0F 0xe7 - invalid */
5213
5214/**
5215 * @opcode 0xe7
5216 * @opcodesub !11 mr/reg
5217 * @oppfx 0x66
5218 * @opcpuid avx
5219 * @opgroup og_avx_cachect
5220 * @opxcpttype 1
5221 * @optest op1=-1 op2=2 -> op1=2
5222 * @optest op1=0 op2=-42 -> op1=-42
5223 */
5224FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5225{
5226 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5227 Assert(pVCpu->iem.s.uVexLength <= 1);
5228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5229 if (IEM_IS_MODRM_MEM_MODE(bRm))
5230 {
5231 if (pVCpu->iem.s.uVexLength == 0)
5232 {
5233 /*
5234 * 128-bit: Memory, register.
5235 */
5236 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5237 IEM_MC_LOCAL(RTUINT128U, uSrc);
5238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5239
5240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5241 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5242 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5243 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5244
5245 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5246 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5247
5248 IEM_MC_ADVANCE_RIP_AND_FINISH();
5249 IEM_MC_END();
5250 }
5251 else
5252 {
5253 /*
5254 * 256-bit: Memory, register.
5255 */
5256 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5257 IEM_MC_LOCAL(RTUINT256U, uSrc);
5258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5259
5260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5261 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5262 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5263 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5264
5265 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5266 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5267
5268 IEM_MC_ADVANCE_RIP_AND_FINISH();
5269 IEM_MC_END();
5270 }
5271 }
5272 /**
5273 * @opdone
5274 * @opmnemonic udvex660fe7reg
5275 * @opcode 0xe7
5276 * @opcodesub 11 mr/reg
5277 * @oppfx 0x66
5278 * @opunused immediate
5279 * @opcpuid avx
5280 * @optest ->
5281 */
5282 else
5283 IEMOP_RAISE_INVALID_OPCODE_RET();
5284}
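
/* A reference sketch (#if 0, illustrative only) of the alignment rule the
   IEM_MC_STORE_MEM_U128_ALIGN_SSE / _U256_ALIGN_AVX statements above
   enforce: vmovntdq requires a naturally aligned operand (16 or 32 bytes)
   and raises \#GP(0) otherwise; the non-temporal hint itself is advisory
   and is simply dropped under emulation. */
#if 0
#include <stdint.h>
static int movntdqIsAlignedRef(uint64_t GCPtrEff, unsigned cbOperand /* 16 or 32 */)
{
    return (GCPtrEff & (cbOperand - 1)) == 0;   /* false => \#GP(0) */
}
#endif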
5285
5286/* Opcode VEX.F3.0F 0xe7 - invalid */
5287/* Opcode VEX.F2.0F 0xe7 - invalid */
5288
5289
5290/* Opcode VEX.0F 0xe8 - invalid */
5291
5292
5293/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5294FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
5295{
5296 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5297 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
5298 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5299}
5300
5301
5302/* Opcode VEX.F3.0F 0xe8 - invalid */
5303/* Opcode VEX.F2.0F 0xe8 - invalid */
5304
5305/* Opcode VEX.0F 0xe9 - invalid */
5306
5307
5308/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5309FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
5310{
5311 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5312 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
5313 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5314}
5315
5316
5317/* Opcode VEX.F3.0F 0xe9 - invalid */
5318/* Opcode VEX.F2.0F 0xe9 - invalid */
5319
5320/* Opcode VEX.0F 0xea - invalid */
5321
5322
5323/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5324FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5325{
5326 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5327 IEMOPMEDIAF3_INIT_VARS(vpminsw);
5328 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5329}
5330
5331
5332/* Opcode VEX.F3.0F 0xea - invalid */
5333/* Opcode VEX.F2.0F 0xea - invalid */
5334
5335/* Opcode VEX.0F 0xeb - invalid */
5336
5337
5338/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5339FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5340{
5341 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5342 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5343 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5344}
5345
5346
5347
5348/* Opcode VEX.F3.0F 0xeb - invalid */
5349/* Opcode VEX.F2.0F 0xeb - invalid */
5350
5351/* Opcode VEX.0F 0xec - invalid */
5352
5353
5354/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5355FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
5356{
5357 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5358 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
5359 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5360}
5361
5362
5363/* Opcode VEX.F3.0F 0xec - invalid */
5364/* Opcode VEX.F2.0F 0xec - invalid */
5365
5366/* Opcode VEX.0F 0xed - invalid */
5367
5368
5369/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5370FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
5371{
5372 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5373 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
5374 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5375}
5376
5377
5378/* Opcode VEX.F3.0F 0xed - invalid */
5379/* Opcode VEX.F2.0F 0xed - invalid */
5380
5381/* Opcode VEX.0F 0xee - invalid */
5382
5383
5384/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5385FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5386{
5387 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5388 IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
5389 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5390}
5391
5392
5393/* Opcode VEX.F3.0F 0xee - invalid */
5394/* Opcode VEX.F2.0F 0xee - invalid */
5395
5396
5397/* Opcode VEX.0F 0xef - invalid */
5398
5399
5400/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5401FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5402{
5403 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5404 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5405 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5406}
5407
5408
5409/* Opcode VEX.F3.0F 0xef - invalid */
5410/* Opcode VEX.F2.0F 0xef - invalid */
5411
5412/* Opcode VEX.0F 0xf0 - invalid */
5413/* Opcode VEX.66.0F 0xf0 - invalid */
5414
5415
5416/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5417FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5418{
5419 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5420 Assert(pVCpu->iem.s.uVexLength <= 1);
5421 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5422 if (IEM_IS_MODRM_REG_MODE(bRm))
5423 {
5424 /*
5425 * Register, register - (not implemented, assuming it raises \#UD).
5426 */
5427 IEMOP_RAISE_INVALID_OPCODE_RET();
5428 }
5429 else if (pVCpu->iem.s.uVexLength == 0)
5430 {
5431 /*
5432 * Register, memory128.
5433 */
5434 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5435 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5437
5438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5439 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5440 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5441 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5442
5443 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5444 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5445
5446 IEM_MC_ADVANCE_RIP_AND_FINISH();
5447 IEM_MC_END();
5448 }
5449 else
5450 {
5451 /*
5452 * Register, memory256.
5453 */
5454 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5455 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5457
5458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5459 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5460 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5461 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5462
5463 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5464 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5465
5466 IEM_MC_ADVANCE_RIP_AND_FINISH();
5467 IEM_MC_END();
5468 }
5469}
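
/* A reference sketch (#if 0, illustrative only): vlddqu performs an
   unaligned load with no alignment fault (hence the _NO_AC fetches
   above), so under emulation it behaves like vmovdqu; a plain memcpy
   captures the semantics. */
#if 0
#include <string.h>
#include <stdint.h>
static void lddquRef(uint8_t *pbDst, uint8_t const *pbSrc, size_t cb /* 16 or 32 */)
{
    memcpy(pbDst, pbSrc, cb);   /* no alignment requirement on pbSrc */
}
#endif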
5470
5471
5472/* Opcode VEX.0F 0xf1 - invalid */
5473/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
5474FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
5475{
5476 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5477 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
5478 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5479}
5480
5481/* Opcode VEX.F2.0F 0xf1 - invalid */
5482
5483/* Opcode VEX.0F 0xf2 - invalid */
5484/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5485FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
5486{
5487 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5488 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
5489 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5490}
5491/* Opcode VEX.F2.0F 0xf2 - invalid */
5492
5493/* Opcode VEX.0F 0xf3 - invalid */
5494/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5495FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
5496{
5497 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5498 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
5499 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5500}
5501/* Opcode VEX.F2.0F 0xf3 - invalid */
5502
5503/* Opcode VEX.0F 0xf4 - invalid */
5504
5505
5506/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
5507FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5508{
5509 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5510 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5511 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5512}
5513
5514
5515/* Opcode VEX.F2.0F 0xf4 - invalid */
5516
5517/* Opcode VEX.0F 0xf5 - invalid */
5518
5519
5520/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5521FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
5522{
5523 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5524 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
5525 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5526}
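
/* A reference sketch (#if 0, illustrative only) of vpmaddwd per 32-bit
   result lane: two adjacent signed 16-bit products are summed.  The sum
   is formed at 64 bits and truncated because the single corner case
   0x8000*0x8000 + 0x8000*0x8000 wraps to 0x80000000, as on hardware. */
#if 0
#include <stdint.h>
static int32_t maddwdLaneRef(int16_t iSrc1Lo, int16_t iSrc2Lo, int16_t iSrc1Hi, int16_t iSrc2Hi)
{
    int64_t iSum = (int64_t)iSrc1Lo * iSrc2Lo + (int64_t)iSrc1Hi * iSrc2Hi;
    return (int32_t)iSum;
}
#endif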
5527
5528
5529/* Opcode VEX.F2.0F 0xf5 - invalid */
5530
5531/* Opcode VEX.0F 0xf6 - invalid */
5532
5533
5534/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5535FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5536{
5537 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5538 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5539 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5540}
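
/* A reference sketch (#if 0, illustrative only) of one vpsadbw result:
   the sum of absolute byte differences over an 8-byte group, which at
   most reaches 8 * 255 = 2040 and is stored in the low word of the
   corresponding qword lane with the upper words zeroed. */
#if 0
#include <stdint.h>
static uint16_t sadbwGroupRef(uint8_t const abSrc1[8], uint8_t const abSrc2[8])
{
    unsigned uSum = 0;
    for (unsigned i = 0; i < 8; i++)
        uSum += abSrc1[i] > abSrc2[i] ? abSrc1[i] - abSrc2[i]
                                      : abSrc2[i] - abSrc1[i];
    return (uint16_t)uSum;
}
#endif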
5541
5542
5543/* Opcode VEX.F2.0F 0xf6 - invalid */
5544
5545/* Opcode VEX.0F 0xf7 - invalid */
5546/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5547FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
5548/* Opcode VEX.F2.0F 0xf7 - invalid */
5549
5550/* Opcode VEX.0F 0xf8 - invalid */
5551
5552
5553/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5554FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5555{
5556 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5557 IEMOPMEDIAF3_INIT_VARS( vpsubb);
5558 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5559}
5560
5561
5562/* Opcode VEX.F2.0F 0xf8 - invalid */
5563
5564/* Opcode VEX.0F 0xf9 - invalid */
5565
5566
5567/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
5568FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
5569{
5570 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5571 IEMOPMEDIAF3_INIT_VARS( vpsubw);
5572 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5573}
5574
5575
5576/* Opcode VEX.F2.0F 0xf9 - invalid */
5577
5578/* Opcode VEX.0F 0xfa - invalid */
5579
5580
5581/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
5582FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
5583{
5584 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5585 IEMOPMEDIAF3_INIT_VARS( vpsubd);
5586 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5587}
5588
5589
5590/* Opcode VEX.F2.0F 0xfa - invalid */
5591
5592/* Opcode VEX.0F 0xfb - invalid */
5593
5594
5595/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
5596FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
5597{
5598 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5599 IEMOPMEDIAF3_INIT_VARS( vpsubq);
5600 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5601}
5602
5603
5604/* Opcode VEX.F2.0F 0xfb - invalid */
5605
5606/* Opcode VEX.0F 0xfc - invalid */
5607
5608
5609/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
5610FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
5611{
5612 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5613 IEMOPMEDIAF3_INIT_VARS( vpaddb);
5614 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5615}
5616
5617
5618/* Opcode VEX.F2.0F 0xfc - invalid */
5619
5620/* Opcode VEX.0F 0xfd - invalid */
5621
5622
5623/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
5624FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
5625{
5626 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5627 IEMOPMEDIAF3_INIT_VARS( vpaddw);
5628 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5629}
5630
5631
5632/* Opcode VEX.F2.0F 0xfd - invalid */
5633
5634/* Opcode VEX.0F 0xfe - invalid */
5635
5636
5637/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
5638FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
5639{
5640 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5641 IEMOPMEDIAF3_INIT_VARS( vpaddd);
5642 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5643}
5644
5645
5646/* Opcode VEX.F2.0F 0xfe - invalid */
5647
5648
5649/** Opcode **** 0x0f 0xff - UD0 */
5650FNIEMOP_DEF(iemOp_vud0)
5651{
5652/** @todo testcase: vud0 */
5653 IEMOP_MNEMONIC(vud0, "vud0");
5654 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
5655 {
5656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
5657 if (IEM_IS_MODRM_MEM_MODE(bRm))
5658 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
5659 }
5660 IEMOP_HLP_DONE_DECODING();
5661 IEMOP_RAISE_INVALID_OPCODE_RET();
5662}
5663
5664
5665
5666/**
5667 * VEX opcode map \#1.
5668 *
5669 * @sa g_apfnTwoByteMap
5670 */
5671const PFNIEMOP g_apfnVexMap1[] =
5672{
5673 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
5674 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
5675 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
5676 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
5677 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
5678 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
5679 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
5680 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
5681 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
5682 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
5683 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
5684 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
5685 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
5686 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
5687 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
5688 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
5689 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
5690
5691 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
5692 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
5693 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
5694 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5695 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5696 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5697 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
5698 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5699 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
5700 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
5701 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
5702 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
5703 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
5704 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
5705 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
5706 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
5707
5708 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
5709 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
5710 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
5711 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
5712 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
5713 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
5714 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
5715 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
5716 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5717 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5718 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
5719 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5720 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
5721 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
5722 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5723 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5724
5725 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
5726 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
5727 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
5728 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
5729 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
5730 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
5731 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
5732 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
5733 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5734 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5735 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5736 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5737 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5738 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5739 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5740 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5741
5742 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
5743 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
5744 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
5745 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
5746 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
5747 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
5748 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
5749 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
5750 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
5751 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
5752 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
5753 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
5754 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
5755 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
5756 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
5757 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
5758
5759 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5760 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
5761 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5762 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5763 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5764 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5765 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5766 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5767 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
5768 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
5769 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
5770 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
5771 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
5772 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
5773 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
5774 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
5775
5776 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5777 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5778 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5779 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5780 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5781 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5782 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5783 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5784 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5785 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5786 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5787 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5788 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5789 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5790 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5791 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
5792
5793 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
5794 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5795 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5796 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5797 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5798 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5799 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5800 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5801 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
5802 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
5803 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
5804 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
5805 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
5806 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
5807 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
5808 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
5809
5810 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
5811 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
5812 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
5813 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
5814 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
5815 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
5816 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
5817 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
5818 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
5819 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
5820 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
5821 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
5822 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
5823 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
5824 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
5825 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
5826
5827 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
5828 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
5829 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
5830 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
5831 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
5832 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
5833 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
5834 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
5835 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
5836 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
5837 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
5838 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
5839 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
5840 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
5841 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
5842 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
5843
5844 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5845 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5846 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5847 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5848 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5849 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5850 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5851 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5852 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5853 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5854 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
5855 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
5856 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
5857 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
5858 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
5859 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
5860
5861 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5862 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5863 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5864 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5865 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5866 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5867 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5868 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5869 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5870 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5871 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
5872 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
5873 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
5874 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
5875 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
5876 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
5877
5878 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5879 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5880 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
5881 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5882 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5883 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5884 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5885 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5886 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5887 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5888 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
5889 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
5890 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
5891 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
5892 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
5893 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
5894
5895 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
5896 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5897 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5898 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5899 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5900 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5901 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5902 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5903 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5904 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5905 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5906 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5907 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5908 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5909 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5910 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5911
5912 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5913 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5914 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5915 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5916 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5917 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5918 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
5919 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5920 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5921 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5922 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5923 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5924 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5925 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5926 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5927 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5928
5929 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
5930 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5931 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5932 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5933 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5934 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5935 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5936 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5937 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5938 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5939 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5940 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5941 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5942 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5943 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5944 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
5945};
5946AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
5947/** @} */
5948