VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@66974

Last change on this file since 66974 was 66972, checked in by vboxsync:
IEM: Implemented vmovaps Vps,Wps (VEX.0F 28).

/* $Id: IEMAllInstructionsVexMap1.cpp.h 66972 2017-05-19 11:29:45Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2016 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/** @name VEX Opcode Map 1
 * @{
 */

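/*
 * The handlers below all widen the ModRM reg/rm fields with the VEX-supplied
 * REX.R/REX.B bits before indexing a SIMD register.  A standalone sketch of
 * that recurring pattern (hypothetical helper names, not IEM code; assumes
 * the usual ModRM layout where X86_MODRM_REG_SHIFT is 3 and both masks are 7):
 */
//static uint8_t sketchModRmReg(uint8_t bRm, uint8_t uRexReg)  /* uRexReg is 0 or 8 */
//{
//    return (uint8_t)(((bRm >> 3) & 7) | uRexReg);   /* bits 5:3 select the reg operand */
//}
//
//static uint8_t sketchModRmRm(uint8_t bRm, uint8_t uRexB)     /* uRexB is 0 or 8 */
//{
//    return (uint8_t)((bRm & 7) | uRexB);            /* bits 2:0 select the r/m operand */
//}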
/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
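/*
 * A standalone model (hypothetical names, not IEM code) of the *_ZX_VLMAX
 * stores used above: a VEX-encoded 128-bit write also zeroes the destination
 * bits above the vector length, unlike a legacy SSE write which leaves
 * bits 255:128 untouched.
 */
//typedef struct { uint64_t au64[4]; } SKETCHYMMREG;  /* one 256-bit YMM register */
//
//static void sketchStoreU128ZxVlMax(SKETCHYMMREG *pDst, uint64_t uLo, uint64_t uHi)
//{
//    pDst->au64[0] = uLo;    /* bits 63:0 */
//    pDst->au64[1] = uHi;    /* bits 127:64 */
//    pDst->au64[2] = 0;      /* bits 255:128 are zeroed (VLMAX zero extension) */
//    pDst->au64[3] = 0;
//}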


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_IGNORES_VEX_L);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_XM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_IGNORES_VEX_L);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}
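/*
 * A standalone sketch (hypothetical names, not IEM code) of the register-form
 * merge above: bits 31:0 come from the second source, bits 127:32 from the
 * VEX.vvvv register, and bits 255:128 are zeroed.
 */
//static void sketchVMovSsRegReg(uint32_t au32Dst[8], uint32_t const au32Vvvv[8], uint32_t const au32Src2[8])
//{
//    au32Dst[0] = au32Src2[0];           /* bits 31:0 from the U32 operand */
//    for (unsigned i = 1; i < 4; i++)
//        au32Dst[i] = au32Vvvv[i];       /* bits 127:32 from the Hss operand */
//    for (unsigned i = 4; i < 8; i++)
//        au32Dst[i] = 0;                 /* bits 255:128 zeroed (VLMAX) */
//}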


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_IGNORES_VEX_L);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_XM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_IGNORES_VEX_L);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_IGNORES_VEX_L);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB /*U32*/,
                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Wss_Hss_Vss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_IGNORES_VEX_L);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_IGNORES_VEX_L);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                           ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_IGNORES_VEX_L);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         * @optest op2=0x2200220122022203
         *         op3=0x3304330533063307
         *         -> op1=0x22002201220222033304330533063307
         * @optest op2=-1 op3=-42 -> op1=-42
         * @note op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_VEX_L_ZERO);

        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                             (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                             IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest op1=1 op2=0 op3=0 -> op1=0
         * @optest op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                                uSrc,
                                                IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
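/*
 * A standalone sketch (hypothetical names, not IEM code) of the vmovhlps
 * merge above, matching the first @optest: the low qword comes from the
 * high qword of the U operand, the high qword from the vvvv register.
 */
//static void sketchVMovHlPs(uint64_t au64Dst[4], uint64_t const au64Vvvv[4], uint64_t const au64Src2[4])
//{
//    au64Dst[0] = au64Src2[1];   /* low qword  <- high qword of the second source */
//    au64Dst[1] = au64Vvvv[1];   /* high qword <- high qword of the vvvv register */
//    au64Dst[2] = 0;             /* bits 255:128 zeroed (VLMAX) */
//    au64Dst[3] = 0;
//}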


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 * @optest op2=0 op3=2 -> op1=2
 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *         -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                                uSrc,
                                                IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *         -> op1=0x00000002000000020000000100000001
 * @optest vex.l==1 /
 *         op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *         -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
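/*
 * A standalone 128-bit sketch (hypothetical names, not IEM code) of the
 * movsldup operation used above: each even-indexed dword is duplicated into
 * the odd slot beside it, per 128-bit lane.
 */
//static void sketchMovSlDup128(uint32_t au32Dst[4], uint32_t const au32Src[4])
//{
//    au32Dst[0] = au32Src[0];    /* dword 0 duplicated into slots 0 and 1 */
//    au32Dst[1] = au32Src[0];
//    au32Dst[2] = au32Src[2];    /* dword 2 duplicated into slots 2 and 3 */
//    au32Dst[3] = au32Src[2];
//}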


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *         -> op1=0x22222222111111112222222211111111
 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *         -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint64_t, uSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint64_t, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
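/*
 * A standalone 128-bit sketch (hypothetical names, not IEM code) of the
 * movddup operation used above: the low qword of each 128-bit lane is
 * duplicated into the high qword.
 */
//static void sketchMovDDup128(uint64_t au64Dst[2], uint64_t uSrcLo)
//{
//    au64Dst[0] = uSrcLo;
//    au64Dst[1] = uSrcLo;    /* low qword duplicated */
//}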


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */
/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0x15 - invalid */
/* Opcode VEX.F2.0F 0x15 - invalid */
/** Opcode VEX.0F 0x16 - vmovhpsv1 Vdq, Hq, Mq / vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode VEX.66.0F 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode VEX.F2.0F 0x16 - invalid */
/** Opcode VEX.0F 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode VEX.66.0F 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode VEX.F3.0F 0x17 - invalid */
/* Opcode VEX.F2.0F 0x17 - invalid */


/* Opcode VEX.0F 0x18 - invalid */
/* Opcode VEX.0F 0x19 - invalid */
/* Opcode VEX.0F 0x1a - invalid */
/* Opcode VEX.0F 0x1b - invalid */
/* Opcode VEX.0F 0x1c - invalid */
/* Opcode VEX.0F 0x1d - invalid */
/* Opcode VEX.0F 0x1e - invalid */
/* Opcode VEX.0F 0x1f - invalid */

/* Opcode VEX.0F 0x20 - invalid */
/* Opcode VEX.0F 0x21 - invalid */
/* Opcode VEX.0F 0x22 - invalid */
/* Opcode VEX.0F 0x23 - invalid */
/* Opcode VEX.0F 0x24 - invalid */
/* Opcode VEX.0F 0x25 - invalid */
/* Opcode VEX.0F 0x26 - invalid */
/* Opcode VEX.0F 0x27 - invalid */

/**
 * @opcode 0x28
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(1, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                           (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
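/*
 * Note the contrast with vmovups earlier: the memory paths above use the
 * aligned fetchers, so a misaligned operand faults.  A standalone sketch
 * (hypothetical names, not IEM code; the real fetcher raises #GP(0)):
 */
//static int sketchCheckSseAvxAlignment(uint64_t GCPtrMem, unsigned cbOperand)
//{
//    if (GCPtrMem & (cbOperand - 1))     /* cbOperand: 16 for VEX.128, 32 for VEX.256 */
//        return VERR_GENERAL_FAILURE;    /* stand-in for raising #GP(0) */
//    return VINF_SUCCESS;
//}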



/** Opcode VEX.66.0F 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovapd_Vpd_Wpd);
//FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
//{
//    IEMOP_MNEMONIC(vmovapd_Wpd_Wpd, "vmovapd Wpd,Wpd");
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 0);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
//                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/* Opcode VEX.F3.0F 0x28 - invalid */
/* Opcode VEX.F2.0F 0x28 - invalid */

/** Opcode VEX.0F 0x29 - vmovaps Wps, Vps */
FNIEMOP_STUB(iemOp_vmovaps_Wps_Vps);
//FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
//{
//    IEMOP_MNEMONIC(vmovaps_Wps_Vps, "vmovaps Wps,Vps");
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 0);
//        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
//                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Memory, register.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
//
//        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/** Opcode VEX.66.0F 0x29 - vmovapd Wpd,Vpd */
FNIEMOP_STUB(iemOp_vmovapd_Wpd_Vpd);
//FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
//{
//    IEMOP_MNEMONIC(vmovapd_Wpd_Vpd, "movapd Wpd,Vpd");
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 0);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
//                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Memory, register.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
//
//        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/* Opcode VEX.F3.0F 0x29 - invalid */
/* Opcode VEX.F2.0F 0x29 - invalid */


/** Opcode VEX.0F 0x2a - invalid */
/** Opcode VEX.66.0F 0x2a - invalid */
/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);


/** Opcode VEX.0F 0x2b - vmovntps Mps, Vps */
FNIEMOP_STUB(iemOp_vmovntps_Mps_Vps);
//FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
//{
//    IEMOP_MNEMONIC(vmovntps_Mps_Vps, "movntps Mps,Vps");
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * memory, register.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//
//        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    /* The register, register encoding is invalid. */
//    else
//        return IEMOP_RAISE_INVALID_OPCODE();
//    return VINF_SUCCESS;
//}

/** Opcode VEX.66.0F 0x2b - vmovntpd Mpd, Vpd */
FNIEMOP_STUB(iemOp_vmovntpd_Mpd_Vpd);
//FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
//{
//    IEMOP_MNEMONIC(vmovntpd_Mpd_Vpd, "movntpd Mdq,Vpd");
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * memory, register.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//
//        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    /* The register, register encoding is invalid. */
//    else
//        return IEMOP_RAISE_INVALID_OPCODE();
//    return VINF_SUCCESS;
//}
/* Opcode VEX.F3.0F 0x2b - invalid */
/* Opcode VEX.F2.0F 0x2b - invalid */


/* Opcode VEX.0F 0x2c - invalid */
/* Opcode VEX.66.0F 0x2c - invalid */
/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/* Opcode VEX.0F 0x2d - invalid */
/* Opcode VEX.66.0F 0x2d - invalid */
/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss);
/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd);
/* Opcode VEX.F3.0F 0x2e - invalid */
/* Opcode VEX.F2.0F 0x2e - invalid */

/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode VEX.F3.0F 0x2f - invalid */
/* Opcode VEX.F2.0F 0x2f - invalid */

/* Opcode VEX.0F 0x30 - invalid */
/* Opcode VEX.0F 0x31 - invalid */
/* Opcode VEX.0F 0x32 - invalid */
/* Opcode VEX.0F 0x33 - invalid */
/* Opcode VEX.0F 0x34 - invalid */
/* Opcode VEX.0F 0x35 - invalid */
/* Opcode VEX.0F 0x36 - invalid */
/* Opcode VEX.0F 0x37 - invalid */
/* Opcode VEX.0F 0x38 - invalid */
/* Opcode VEX.0F 0x39 - invalid */
/* Opcode VEX.0F 0x3a - invalid */
/* Opcode VEX.0F 0x3b - invalid */
/* Opcode VEX.0F 0x3c - invalid */
/* Opcode VEX.0F 0x3d - invalid */
/* Opcode VEX.0F 0x3e - invalid */
/* Opcode VEX.0F 0x3f - invalid */
/* Opcode VEX.0F 0x40 - invalid */
/* Opcode VEX.0F 0x41 - invalid */
/* Opcode VEX.0F 0x42 - invalid */
/* Opcode VEX.0F 0x43 - invalid */
/* Opcode VEX.0F 0x44 - invalid */
/* Opcode VEX.0F 0x45 - invalid */
/* Opcode VEX.0F 0x46 - invalid */
/* Opcode VEX.0F 0x47 - invalid */
/* Opcode VEX.0F 0x48 - invalid */
/* Opcode VEX.0F 0x49 - invalid */
/* Opcode VEX.0F 0x4a - invalid */
/* Opcode VEX.0F 0x4b - invalid */
/* Opcode VEX.0F 0x4c - invalid */
/* Opcode VEX.0F 0x4d - invalid */
/* Opcode VEX.0F 0x4e - invalid */
/* Opcode VEX.0F 0x4f - invalid */

/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
/* Opcode VEX.F3.0F 0x50 - invalid */
/* Opcode VEX.F2.0F 0x50 - invalid */

/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
/* Opcode VEX.66.0F 0x52 - invalid */
/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
/* Opcode VEX.F2.0F 0x52 - invalid */

/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
/* Opcode VEX.66.0F 0x53 - invalid */
/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
/* Opcode VEX.F2.0F 0x53 - invalid */

/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x54 - invalid */
/* Opcode VEX.F2.0F 0x54 - invalid */

/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x55 - invalid */
/* Opcode VEX.F2.0F 0x55 - invalid */

/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x56 - invalid */
/* Opcode VEX.F2.0F 0x56 - invalid */

/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x57 - invalid */
/* Opcode VEX.F2.0F 0x57 - invalid */

/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);

/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
/* Opcode VEX.F2.0F 0x5b - invalid */

/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);

/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);


///**
// * Common worker for SSE2 instructions on the forms:
// *      pxxxx xmm1, xmm2/mem128
// *
// * The 2nd operand is the first half of a register, which in the memory case
// * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
// * memory accessed for SSE.
// *
// * Exceptions type 4.
// */
//FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if (!pImpl->pfnU64)
//        return IEMOP_RAISE_INVALID_OPCODE();
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
//        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(2, 0);
//        IEM_MC_ARG(uint64_t *, pDst, 0);
//        IEM_MC_ARG(uint32_t const *, pSrc, 1);
//        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
//        IEM_MC_PREPARE_FPU_USAGE();
//        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
//        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
//        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(2, 2);
//        IEM_MC_ARG(uint64_t *, pDst, 0);
//        IEM_MC_LOCAL(uint32_t, uSrc);
//        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
//        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//
//        IEM_MC_PREPARE_FPU_USAGE();
//        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
//        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}
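/*
 * A standalone byte-level sketch (hypothetical names, not IEM code) of the
 * 64-bit punpcklbw case such a worker handles: the low four bytes of the
 * destination are interleaved with the low four bytes of the source.
 */
//static void sketchPunpckLbw64(uint8_t au8Dst[8], uint8_t const au8Src[4])
//{
//    /* Walk backwards so the destination can be expanded in place. */
//    for (int i = 3; i >= 0; i--)
//    {
//        au8Dst[2 * i + 1] = au8Src[i];  /* odd bytes from the source */
//        au8Dst[2 * i]     = au8Dst[i];  /* even bytes from the destination */
//    }
//}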


/* Opcode VEX.0F 0x60 - invalid */

/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpunpcklbw_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
//    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
//}

/* Opcode VEX.F3.0F 0x60 - invalid */


/* Opcode VEX.0F 0x61 - invalid */

/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpunpcklwd_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
//    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
//}

/* Opcode VEX.F3.0F 0x61 - invalid */


/* Opcode VEX.0F 0x62 - invalid */

/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpunpckldq_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
//    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
//}

/* Opcode VEX.F3.0F 0x62 - invalid */



/* Opcode VEX.0F 0x63 - invalid */
/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0x63 - invalid */

/* Opcode VEX.0F 0x64 - invalid */
/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0x64 - invalid */

/* Opcode VEX.0F 0x65 - invalid */
/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0x65 - invalid */

/* Opcode VEX.0F 0x66 - invalid */
/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0x66 - invalid */

/* Opcode VEX.0F 0x67 - invalid */
/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
/* Opcode VEX.F3.0F 0x67 - invalid */


///**
// * Common worker for SSE2 instructions on the forms:
// *      pxxxx xmm1, xmm2/mem128
// *
// * The 2nd operand is the second half of a register, which in the memory case
// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
// * where it may read the full 128 bits or only the upper 64 bits.
// *
// * Exceptions type 4.
// */
//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(2, 0);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(2, 2);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_LOCAL(RTUINT128U, uSrc);
//        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
//
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}
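
/* For reference: the "high-high to full" counterpart (e.g. punpckhbw) instead
   interleaves the high halves: dst.b[2*i] = olddst.b[8+i] and
   dst.b[2*i+1] = src.b[8+i] for i = 0..7 in the 128-bit form. */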


/* Opcode VEX.0F 0x68 - invalid */

/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpunpckhbw_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
//    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
//}
/* Opcode VEX.F3.0F 0x68 - invalid */


/* Opcode VEX.0F 0x69 - invalid */

/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpunpckhwd_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
//    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
//}
/* Opcode VEX.F3.0F 0x69 - invalid */


/* Opcode VEX.0F 0x6a - invalid */

/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpunpckhdq_Vx_Hx_W);
//FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
//{
//    IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
//    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
//}
/* Opcode VEX.F3.0F 0x6a - invalid */


/* Opcode VEX.0F 0x6b - invalid */
/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0x6b - invalid */


/* Opcode VEX.0F 0x6c - invalid */

/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpunpcklqdq_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
//    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
//}

/* Opcode VEX.F3.0F 0x6c - invalid */
/* Opcode VEX.F2.0F 0x6c - invalid */


/* Opcode VEX.0F 0x6d - invalid */

/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpunpckhqdq_Vx_Hx_W);
//FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
//{
//    IEMOP_MNEMONIC(vpunpckhqdq, "vpunpckhqdq Vx, Hx, W");
//    return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
//}

/* Opcode VEX.F3.0F 0x6d - invalid */


/* Opcode VEX.0F 0x6e - invalid */

/** Opcode VEX.66.0F 0x6e - vmovd/q Vy, Ey */
FNIEMOP_STUB(iemOp_vmovd_q_Vy_Ey);
//FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
//        IEMOP_MNEMONIC(vmovq_Vq_Eq, "vmovq Vq,Eq");
//    else
//        IEMOP_MNEMONIC(vmovd_Vd_Ed, "vmovd Vd,Ed");
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /* XMM, greg */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 1);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
//        {
//            IEM_MC_LOCAL(uint64_t, u64Tmp);
//            IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
//        }
//        else
//        {
//            IEM_MC_LOCAL(uint32_t, u32Tmp);
//            IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
//        }
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /* XMM, [mem] */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
//        {
//            IEM_MC_LOCAL(uint64_t, u64Tmp);
//            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//            IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
//        }
//        else
//        {
//            IEM_MC_LOCAL(uint32_t, u32Tmp);
//            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//            IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
//        }
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/* Opcode VEX.F3.0F 0x6e - invalid */


/* Opcode VEX.0F 0x6f - invalid */

/** Opcode VEX.66.0F 0x6f - vmovdqa Vx, Wx */
FNIEMOP_STUB(iemOp_vmovdqa_Vx_Wx);
//FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    IEMOP_MNEMONIC(vmovdqa_Vdq_Wdq, "vmovdqa Vdq,Wdq");
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 0);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
//                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/** Opcode VEX.F3.0F 0x6f - vmovdqu Vx, Wx */
FNIEMOP_STUB(iemOp_vmovdqu_Vx_Wx);
//FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    IEMOP_MNEMONIC(vmovdqu_Vdq_Wdq, "vmovdqu Vdq,Wdq");
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 0);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
//                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}


/* Opcode VEX.0F 0x70 - invalid */

/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
FNIEMOP_STUB(iemOp_vpshufd_Vx_Wx_Ib);
//FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
//{
//    IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//
//        IEM_MC_BEGIN(3, 0);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
//        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(3, 2);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_LOCAL(RTUINT128U, uSrc);
//        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
//        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}
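
/* For reference: pshufd picks each destination dword independently from the
   source using two bits of the immediate, dst.d[i] = src.d[(bEvil >> (2*i)) & 3],
   so an immediate of 0x1B reverses the four dwords. */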

/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
FNIEMOP_STUB(iemOp_vpshufhw_Vx_Wx_Ib);
//FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
//{
//    IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//
//        IEM_MC_BEGIN(3, 0);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
//        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(3, 2);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_LOCAL(RTUINT128U, uSrc);
//        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
//        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
FNIEMOP_STUB(iemOp_vpshuflw_Vx_Wx_Ib);
//FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
//{
//    IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//
//        IEM_MC_BEGIN(3, 0);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
//        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(3, 2);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_LOCAL(RTUINT128U, uSrc);
//        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
//        IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}


/* Opcode VEX.0F 0x71 11/2 - invalid. */
/** Opcode VEX.66.0F 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x71 11/4 - invalid */
/** Opcode VEX.66.0F 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x71 11/6 - invalid */
/** Opcode VEX.66.0F 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);


/**
 * VEX group 12 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);


/** Opcode VEX.0F 0x71. */
FNIEMOP_DEF(iemOp_VGrp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                     + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
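
/* Dispatch example: VEX.66.0F 71 /6 ib (vpsllw Hx,Ux,Ib) with a register-form
   ModRM byte such as 0xF6 gives reg = 6, and the 066h prefix gives
   idxPrefix = 1, so the lookup above resolves entry 6*4 + 1, i.e.
   iemOp_VGrp12_vpsllw_Hx_Ux_Ib. */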


/* Opcode VEX.0F 0x72 11/2 - invalid. */
/** Opcode VEX.66.0F 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x72 11/4 - invalid. */
/** Opcode VEX.66.0F 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x72 11/6 - invalid. */
/** Opcode VEX.66.0F 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);


/**
 * VEX group 13 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);

/** Opcode VEX.0F 0x72. */
FNIEMOP_DEF(iemOp_VGrp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                     + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/* Opcode VEX.0F 0x73 11/2 - invalid. */
/** Opcode VEX.66.0F 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode VEX.66.0F 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x73 11/6 - invalid. */
/** Opcode VEX.66.0F 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);

/** Opcode VEX.66.0F 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);

/**
 * VEX group 14 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);


/** Opcode VEX.0F 0x73. */
FNIEMOP_DEF(iemOp_VGrp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                     + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


///**
// * Common worker for SSE2 instructions on the forms:
// *      pxxx xmm1, xmm2/mem128
// *
// * Proper alignment of the 128-bit operand is enforced.
// * Exceptions type 4. SSE2 cpuid checks.
// */
//FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(2, 0);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(2, 2);
//        IEM_MC_ARG(PRTUINT128U, pDst, 0);
//        IEM_MC_LOCAL(RTUINT128U, uSrc);
//        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
//
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}
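
/* For reference: these "full-full to full" workers combine the two 128-bit
   operands lane by lane; pxor, for instance, computes dst.au64[i] ^= src.au64[i]. */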


/* Opcode VEX.0F 0x74 - invalid */

/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpeqb_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
//    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
//}

/* Opcode VEX.F3.0F 0x74 - invalid */
/* Opcode VEX.F2.0F 0x74 - invalid */


/* Opcode VEX.0F 0x75 - invalid */

/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpeqw_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
//    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
//}

/* Opcode VEX.F3.0F 0x75 - invalid */
/* Opcode VEX.F2.0F 0x75 - invalid */


/* Opcode VEX.0F 0x76 - invalid */

/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpeqd_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
//    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
//}

/* Opcode VEX.F3.0F 0x76 - invalid */
/* Opcode VEX.F2.0F 0x76 - invalid */


/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
/* Opcode VEX.66.0F 0x77 - invalid */
/* Opcode VEX.F3.0F 0x77 - invalid */
/* Opcode VEX.F2.0F 0x77 - invalid */

/* Opcode VEX.0F 0x78 - invalid */
/* Opcode VEX.66.0F 0x78 - invalid */
/* Opcode VEX.F3.0F 0x78 - invalid */
/* Opcode VEX.F2.0F 0x78 - invalid */

/* Opcode VEX.0F 0x79 - invalid */
/* Opcode VEX.66.0F 0x79 - invalid */
/* Opcode VEX.F3.0F 0x79 - invalid */
/* Opcode VEX.F2.0F 0x79 - invalid */

/* Opcode VEX.0F 0x7a - invalid */
/* Opcode VEX.66.0F 0x7a - invalid */
/* Opcode VEX.F3.0F 0x7a - invalid */
/* Opcode VEX.F2.0F 0x7a - invalid */

/* Opcode VEX.0F 0x7b - invalid */
/* Opcode VEX.66.0F 0x7b - invalid */
/* Opcode VEX.F3.0F 0x7b - invalid */
/* Opcode VEX.F2.0F 0x7b - invalid */

/* Opcode VEX.0F 0x7c - invalid */
/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x7c - invalid */
/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode VEX.0F 0x7d - invalid */
/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0x7d - invalid */
/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);


/* Opcode VEX.0F 0x7e - invalid */

/** Opcode VEX.66.0F 0x7e - vmovd/q Ey, Vy */
FNIEMOP_STUB(iemOp_vmovd_q_Ey_Vy);
//FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
//        IEMOP_MNEMONIC(vmovq_Eq_Vq, "vmovq Eq,Vq");
//    else
//        IEMOP_MNEMONIC(vmovd_Ed_Vd, "vmovd Ed,Vd");
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /* greg, XMM */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 1);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
//        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
//        {
//            IEM_MC_LOCAL(uint64_t, u64Tmp);
//            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//            IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
//        }
//        else
//        {
//            IEM_MC_LOCAL(uint32_t, u32Tmp);
//            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//            IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
//        }
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /* [mem], XMM */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
//        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
//        {
//            IEM_MC_LOCAL(uint64_t, u64Tmp);
//            IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
//        }
//        else
//        {
//            IEM_MC_LOCAL(uint32_t, u32Tmp);
//            IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
//        }
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/** Opcode VEX.F3.0F 0x7e - vmovq Vq, Wq */
FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
/* Opcode VEX.F2.0F 0x7e - invalid */


/* Opcode VEX.0F 0x7f - invalid */

/** Opcode VEX.66.0F 0x7f - vmovdqa Wx,Vx */
FNIEMOP_STUB(iemOp_vmovdqa_Wx_Vx);
//FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
//{
//    IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 0);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
//                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
//
//        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/** Opcode VEX.F3.0F 0x7f - vmovdqu Wx,Vx */
FNIEMOP_STUB(iemOp_vmovdqu_Wx_Vx);
//FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 0);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
//                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Register, memory.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
//
//        IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/* Opcode VEX.F2.0F 0x7f - invalid */


/* Opcode VEX.0F 0x80 - invalid */
/* Opcode VEX.0F 0x81 - invalid */
/* Opcode VEX.0F 0x82 - invalid */
/* Opcode VEX.0F 0x83 - invalid */
/* Opcode VEX.0F 0x84 - invalid */
/* Opcode VEX.0F 0x85 - invalid */
/* Opcode VEX.0F 0x86 - invalid */
/* Opcode VEX.0F 0x87 - invalid */
/* Opcode VEX.0F 0x88 - invalid */
/* Opcode VEX.0F 0x89 - invalid */
/* Opcode VEX.0F 0x8a - invalid */
/* Opcode VEX.0F 0x8b - invalid */
/* Opcode VEX.0F 0x8c - invalid */
/* Opcode VEX.0F 0x8d - invalid */
/* Opcode VEX.0F 0x8e - invalid */
/* Opcode VEX.0F 0x8f - invalid */
/* Opcode VEX.0F 0x90 - invalid */
/* Opcode VEX.0F 0x91 - invalid */
/* Opcode VEX.0F 0x92 - invalid */
/* Opcode VEX.0F 0x93 - invalid */
/* Opcode VEX.0F 0x94 - invalid */
/* Opcode VEX.0F 0x95 - invalid */
/* Opcode VEX.0F 0x96 - invalid */
/* Opcode VEX.0F 0x97 - invalid */
/* Opcode VEX.0F 0x98 - invalid */
/* Opcode VEX.0F 0x99 - invalid */
/* Opcode VEX.0F 0x9a - invalid */
/* Opcode VEX.0F 0x9b - invalid */
/* Opcode VEX.0F 0x9c - invalid */
/* Opcode VEX.0F 0x9d - invalid */
/* Opcode VEX.0F 0x9e - invalid */
/* Opcode VEX.0F 0x9f - invalid */
/* Opcode VEX.0F 0xa0 - invalid */
/* Opcode VEX.0F 0xa1 - invalid */
/* Opcode VEX.0F 0xa2 - invalid */
/* Opcode VEX.0F 0xa3 - invalid */
/* Opcode VEX.0F 0xa4 - invalid */
/* Opcode VEX.0F 0xa5 - invalid */
/* Opcode VEX.0F 0xa6 - invalid */
/* Opcode VEX.0F 0xa7 - invalid */
/* Opcode VEX.0F 0xa8 - invalid */
/* Opcode VEX.0F 0xa9 - invalid */
/* Opcode VEX.0F 0xaa - invalid */
/* Opcode VEX.0F 0xab - invalid */
/* Opcode VEX.0F 0xac - invalid */
/* Opcode VEX.0F 0xad - invalid */


/* Opcode VEX.0F 0xae mem/0 - invalid. */
/* Opcode VEX.0F 0xae mem/1 - invalid. */

/**
 * @ opmaps grp15
 * @ opcode !11/2
 * @ oppfx none
 * @ opcpuid sse
 * @ opgroup og_sse_mxcsrsm
 * @ opxcpttype 5
 * @ optest op1=0 -> mxcsr=0
 * @ optest op1=0x2083 -> mxcsr=0x2083
 * @ optest op1=0xfffffffe -> value.xcpt=0xd
 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 */
FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
//{
//    IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
//    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
//        return IEMOP_RAISE_INVALID_OPCODE();
//
//    IEM_MC_BEGIN(2, 0);
//    IEM_MC_ARG(uint8_t, iEffSeg, 0);
//    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
//    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
//    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
//    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
//    IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
//    IEM_MC_END();
//    return VINF_SUCCESS;
//}


/**
 * @opmaps vexgrp15
 * @opcode !11/3
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_mxcsrsm
 * @opxcpttype 5
 * @optest mxcsr=0 -> op1=0
 * @optest mxcsr=0x2083 -> op1=0x2083
 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
 *         -> value.xcpt=0x6
 * @remarks AMD Jaguar CPU (f0x16,m0,s1) raises \#UD when CR0.EM is set. It
 *          also doesn't seem to check XCR0[2:1] != 11b. This does not match
 *          the APMv4 rev 3.17 page 509.
 * @todo Test this instruction on AMD Ryzen.
 */
FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
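
/* Usage note: vstmxcsr stores the live 32-bit MXCSR value to the given dword;
   e.g. right after CPU reset it would store the architectural default 0x1F80. */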

/* Opcode VEX.0F 0xae mem/4 - invalid. */
/* Opcode VEX.0F 0xae mem/5 - invalid. */
/* Opcode VEX.0F 0xae mem/6 - invalid. */
/* Opcode VEX.0F 0xae mem/7 - invalid. */

/* Opcode VEX.0F 0xae 11b/0 - invalid. */
/* Opcode VEX.0F 0xae 11b/1 - invalid. */
/* Opcode VEX.0F 0xae 11b/2 - invalid. */
/* Opcode VEX.0F 0xae 11b/3 - invalid. */
/* Opcode VEX.0F 0xae 11b/4 - invalid. */
/* Opcode VEX.0F 0xae 11b/5 - invalid. */
/* Opcode VEX.0F 0xae 11b/6 - invalid. */
/* Opcode VEX.0F 0xae 11b/7 - invalid. */

/**
 * VEX group 15 jump table for memory variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
{   /* pfx: none, 066h, 0f3h, 0f2h */
    /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);


/** Opcode VEX.0F 0xae. */
FNIEMOP_DEF(iemOp_VGrp15)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register */
        return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);

    /* memory, register */
    return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
}


/* Opcode VEX.0F 0xaf - invalid. */

/* Opcode VEX.0F 0xb0 - invalid. */
/* Opcode VEX.0F 0xb1 - invalid. */
/* Opcode VEX.0F 0xb2 - invalid. */
/* Opcode VEX.0F 0xb3 - invalid. */
/* Opcode VEX.0F 0xb4 - invalid. */
/* Opcode VEX.0F 0xb5 - invalid. */
/* Opcode VEX.0F 0xb6 - invalid. */
/* Opcode VEX.0F 0xb7 - invalid. */
/* Opcode VEX.0F 0xb8 - invalid. */
/* Opcode VEX.0F 0xb9 - invalid. */
/* Opcode VEX.0F 0xba - invalid. */
/* Opcode VEX.0F 0xbb - invalid. */
/* Opcode VEX.0F 0xbc - invalid. */
/* Opcode VEX.0F 0xbd - invalid. */
/* Opcode VEX.0F 0xbe - invalid. */
/* Opcode VEX.0F 0xbf - invalid. */

/* Opcode VEX.0F 0xc0 - invalid. */
/* Opcode VEX.66.0F 0xc0 - invalid. */
/* Opcode VEX.F3.0F 0xc0 - invalid. */
/* Opcode VEX.F2.0F 0xc0 - invalid. */

/* Opcode VEX.0F 0xc1 - invalid. */
/* Opcode VEX.66.0F 0xc1 - invalid. */
/* Opcode VEX.F3.0F 0xc1 - invalid. */
/* Opcode VEX.F2.0F 0xc1 - invalid. */

/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);

/* Opcode VEX.0F 0xc3 - invalid */
/* Opcode VEX.66.0F 0xc3 - invalid */
/* Opcode VEX.F3.0F 0xc3 - invalid */
/* Opcode VEX.F2.0F 0xc3 - invalid */

/* Opcode VEX.0F 0xc4 - invalid */
/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
/* Opcode VEX.F3.0F 0xc4 - invalid */
/* Opcode VEX.F2.0F 0xc4 - invalid */

/* Opcode VEX.0F 0xc5 - invalid */
/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
/* Opcode VEX.F3.0F 0xc5 - invalid */
/* Opcode VEX.F2.0F 0xc5 - invalid */

/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
/* Opcode VEX.F3.0F 0xc6 - invalid */
/* Opcode VEX.F2.0F 0xc6 - invalid */

/* Opcode VEX.0F 0xc7 - invalid */
/* Opcode VEX.66.0F 0xc7 - invalid */
/* Opcode VEX.F3.0F 0xc7 - invalid */
/* Opcode VEX.F2.0F 0xc7 - invalid */

/* Opcode VEX.0F 0xc8 - invalid */
/* Opcode VEX.0F 0xc9 - invalid */
/* Opcode VEX.0F 0xca - invalid */
/* Opcode VEX.0F 0xcb - invalid */
/* Opcode VEX.0F 0xcc - invalid */
/* Opcode VEX.0F 0xcd - invalid */
/* Opcode VEX.0F 0xce - invalid */
/* Opcode VEX.0F 0xcf - invalid */


/* Opcode VEX.0F 0xd0 - invalid */
/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0xd0 - invalid */
/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);

/* Opcode VEX.0F 0xd1 - invalid */
/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xd1 - invalid */
/* Opcode VEX.F2.0F 0xd1 - invalid */

/* Opcode VEX.0F 0xd2 - invalid */
/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xd2 - invalid */
/* Opcode VEX.F2.0F 0xd2 - invalid */

/* Opcode VEX.0F 0xd3 - invalid */
/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xd3 - invalid */
/* Opcode VEX.F2.0F 0xd3 - invalid */

/* Opcode VEX.0F 0xd4 - invalid */
/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xd4 - invalid */
/* Opcode VEX.F2.0F 0xd4 - invalid */

/* Opcode VEX.0F 0xd5 - invalid */
/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xd5 - invalid */
/* Opcode VEX.F2.0F 0xd5 - invalid */

/* Opcode VEX.0F 0xd6 - invalid */

/**
 * @ opcode 0xd6
 * @ oppfx 0x66
 * @ opcpuid sse2
 * @ opgroup og_sse2_pcksclr_datamove
 * @ opxcpttype none
 * @ optest op1=-1 op2=2 -> op1=2
 * @ optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
//FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
//{
//    IEMOP_MNEMONIC2(MR, VMOVQ, vmovq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /*
//         * Register, register.
//         */
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(uint64_t, uSrc);
//
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
//
//        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    else
//    {
//        /*
//         * Memory, register.
//         */
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(uint64_t, uSrc);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
//
//        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//    }
//    return VINF_SUCCESS;
//}

/* Opcode VEX.F3.0F 0xd6 - invalid */
/* Opcode VEX.F2.0F 0xd6 - invalid */


/* Opcode VEX.0F 0xd7 - invalid */

/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
FNIEMOP_STUB(iemOp_vpmovmskb_Gd_Ux);
//FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
//{
//    /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
//    /** @todo testcase: Check that the instruction implicitly clears the high
//     *        bits in 64-bit mode. The REX.W is only necessary once VLMAX > 256
//     *        and opcode modifications are made to work with the whole width (not
//     *        just 128). */
//    IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd, Ux");
//    /* Docs say register only. */
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
//    {
//        IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
//        IEM_MC_BEGIN(2, 0);
//        IEM_MC_ARG(uint64_t *, pDst, 0);
//        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_PREPARE_SSE_USAGE();
//        IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
//        IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//        return VINF_SUCCESS;
//    }
//    return IEMOP_RAISE_INVALID_OPCODE();
//}
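
/* For reference: pmovmskb gathers the most significant bit of each source
   byte, so bit i of the destination is bit 7 of src.b[i] for the 128-bit form,
   with all higher destination bits cleared. */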

/* Opcode VEX.F3.0F 0xd7 - invalid */
/* Opcode VEX.F2.0F 0xd7 - invalid */


/* Opcode VEX.0F 0xd8 - invalid */
/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xd8 - invalid */
/* Opcode VEX.F2.0F 0xd8 - invalid */

/* Opcode VEX.0F 0xd9 - invalid */
/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xd9 - invalid */
/* Opcode VEX.F2.0F 0xd9 - invalid */

/* Opcode VEX.0F 0xda - invalid */
/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xda - invalid */
/* Opcode VEX.F2.0F 0xda - invalid */

/* Opcode VEX.0F 0xdb - invalid */
/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xdb - invalid */
/* Opcode VEX.F2.0F 0xdb - invalid */

/* Opcode VEX.0F 0xdc - invalid */
/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xdc - invalid */
/* Opcode VEX.F2.0F 0xdc - invalid */

/* Opcode VEX.0F 0xdd - invalid */
/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xdd - invalid */
/* Opcode VEX.F2.0F 0xdd - invalid */

/* Opcode VEX.0F 0xde - invalid */
/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xde - invalid */
/* Opcode VEX.F2.0F 0xde - invalid */

/* Opcode VEX.0F 0xdf - invalid */
/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xdf - invalid */
/* Opcode VEX.F2.0F 0xdf - invalid */

/* Opcode VEX.0F 0xe0 - invalid */
/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe0 - invalid */
/* Opcode VEX.F2.0F 0xe0 - invalid */

/* Opcode VEX.0F 0xe1 - invalid */
/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xe1 - invalid */
/* Opcode VEX.F2.0F 0xe1 - invalid */

/* Opcode VEX.0F 0xe2 - invalid */
/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe2 - invalid */
/* Opcode VEX.F2.0F 0xe2 - invalid */

/* Opcode VEX.0F 0xe3 - invalid */
/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe3 - invalid */
/* Opcode VEX.F2.0F 0xe3 - invalid */

/* Opcode VEX.0F 0xe4 - invalid */
/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xe4 - invalid */
/* Opcode VEX.F2.0F 0xe4 - invalid */

/* Opcode VEX.0F 0xe5 - invalid */
/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe5 - invalid */
/* Opcode VEX.F2.0F 0xe5 - invalid */

/* Opcode VEX.0F 0xe6 - invalid */
/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);


/* Opcode VEX.0F 0xe7 - invalid */

/** Opcode VEX.66.0F 0xe7 - vmovntdq Mx, Vx */
FNIEMOP_STUB(iemOp_vmovntdq_Mx_Vx);
//FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
//{
//    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
//    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
//    {
//        /* Register, memory. */
//        IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
//        IEM_MC_BEGIN(0, 2);
//        IEM_MC_LOCAL(RTUINT128U, uSrc);
//        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
//
//        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
//        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
//        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
//
//        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
//        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
//
//        IEM_MC_ADVANCE_RIP();
//        IEM_MC_END();
//        return VINF_SUCCESS;
//    }
//
//    /* The register, register encoding is invalid. */
//    return IEMOP_RAISE_INVALID_OPCODE();
//}

/* Opcode VEX.F3.0F 0xe7 - invalid */
/* Opcode VEX.F2.0F 0xe7 - invalid */


/* Opcode VEX.0F 0xe8 - invalid */
/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xe8 - invalid */
/* Opcode VEX.F2.0F 0xe8 - invalid */

/* Opcode VEX.0F 0xe9 - invalid */
/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe9 - invalid */
/* Opcode VEX.F2.0F 0xe9 - invalid */

/* Opcode VEX.0F 0xea - invalid */
/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xea - invalid */
/* Opcode VEX.F2.0F 0xea - invalid */

/* Opcode VEX.0F 0xeb - invalid */
/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xeb - invalid */
/* Opcode VEX.F2.0F 0xeb - invalid */

/* Opcode VEX.0F 0xec - invalid */
/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xec - invalid */
/* Opcode VEX.F2.0F 0xec - invalid */

/* Opcode VEX.0F 0xed - invalid */
/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xed - invalid */
/* Opcode VEX.F2.0F 0xed - invalid */

/* Opcode VEX.0F 0xee - invalid */
/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xee - invalid */
/* Opcode VEX.F2.0F 0xee - invalid */


/* Opcode VEX.0F 0xef - invalid */

/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpxor, "vpxor");
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
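
/* Note: the call above reuses the SSE2 full-full worker (a commented-out copy
   sits earlier in this file), so vpxor presumably behaves like the 128-bit
   pxor here until a dedicated AVX worker handling VEX.vvvv and 256-bit forms
   is added. */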
3141
3142/* Opcode VEX.F3.0F 0xef - invalid */
3143/* Opcode VEX.F2.0F 0xef - invalid */
3144
/* Opcode VEX.0F 0xf0 - invalid */
/* Opcode VEX.66.0F 0xf0 - invalid */
/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);

/* Opcode VEX.0F 0xf1 - invalid */
/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf1 - invalid */

/* Opcode VEX.0F 0xf2 - invalid */
/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf2 - invalid */

/* Opcode VEX.0F 0xf3 - invalid */
/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf3 - invalid */

/* Opcode VEX.0F 0xf4 - invalid */
/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf4 - invalid */

/* Opcode VEX.0F 0xf5 - invalid */
/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf5 - invalid */

/* Opcode VEX.0F 0xf6 - invalid */
/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf6 - invalid */

/* Opcode VEX.0F 0xf7 - invalid */
/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode VEX.F2.0F 0xf7 - invalid */

/* Opcode VEX.0F 0xf8 - invalid */
/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf8 - invalid */

/* Opcode VEX.0F 0xf9 - invalid */
/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf9 - invalid */

/* Opcode VEX.0F 0xfa - invalid */
/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xfa - invalid */

/* Opcode VEX.0F 0xfb - invalid */
/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xfb - invalid */

/* Opcode VEX.0F 0xfc - invalid */
/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xfc - invalid */

/* Opcode VEX.0F 0xfd - invalid */
/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xfd - invalid */

/* Opcode VEX.0F 0xfe - invalid */
/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_vud0)
{
    IEMOP_MNEMONIC(vud0, "vud0");
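    /* Intel CPUs consume (and decode) a ModR/M byte and any memory operand
       for UD0 before raising the invalid opcode exception; other vendors
       raise it right away. */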
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}



/**
 * VEX opcode map \#1.
 *
 * @sa g_apfnTwoByteMap
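 *
 * @remarks Four handler entries per opcode byte, one for each SIMD prefix
 *          (none, 066h, 0f3h, 0f2h), i.e. 256 x 4 = 1024 entries in total
 *          (see the AssertCompile after the table).  A dispatcher would
 *          presumably index it along the lines of
 *          g_apfnVexMap1[(uintptr_t)bOpcode * 4 + idxPrefix].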
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
    /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */

    /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */ IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
/** @} */
