VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@ 66886

Last change on this file since 66886 was 66886, checked in by vboxsync, 8 years ago

IEM: Implemented vmovups Vps,Wps (VEX.0f 10)

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 121.0 KB
Line 
1/* $Id: IEMAllInstructionsVexMap1.cpp.h 66886 2017-05-15 09:20:40Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2016 Oracle Corporation
11 *
12 * This file is part of VirtualBox Open Source Edition (OSE), as
13 * available from http://www.virtualbox.org. This file is free software;
14 * you can redistribute it and/or modify it under the terms of the GNU
15 * General Public License (GPL) as published by the Free Software
16 * Foundation, in version 2 as it comes in the "COPYING" file of the
17 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
18 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
19 */
20
21
22/** @name VEX Opcode Map 1
23 * @{
24 */
25
26
27/* Opcode VEX.0F 0x00 - invalid */
28/* Opcode VEX.0F 0x01 - invalid */
29/* Opcode VEX.0F 0x02 - invalid */
30/* Opcode VEX.0F 0x03 - invalid */
31/* Opcode VEX.0F 0x04 - invalid */
32/* Opcode VEX.0F 0x05 - invalid */
33/* Opcode VEX.0F 0x06 - invalid */
34/* Opcode VEX.0F 0x07 - invalid */
35/* Opcode VEX.0F 0x08 - invalid */
36/* Opcode VEX.0F 0x09 - invalid */
37/* Opcode VEX.0F 0x0a - invalid */
38
/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    /* VEX-encoded UD2: deliberately decodes to an invalid-opcode (\#UD) raise. */
    IEMOP_MNEMONIC(vud2, "vud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
45
46/* Opcode VEX.0F 0x0c - invalid */
47/* Opcode VEX.0F 0x0d - invalid */
48/* Opcode VEX.0F 0x0e - invalid */
49/* Opcode VEX.0F 0x0f - invalid */
50
51
/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    /*
     * VMOVUPS Vps,Wps - unaligned packed-single move, load / register-copy form.
     * VEX.L selects the operand width: L=0 is 128-bit, L=1 is 256-bit.
     */
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    Assert(pVCpu->iem.s.uVexLength <= 1); /* VEX.L is a single bit; anything else is a decoder bug. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_NO_VVVV(); /* this encoding takes no VEX.vvvv operand */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            /* _ZX variant: per VMOVUPS (VEX.128) semantics the upper YMM half of the
               destination is zeroed. */
            IEM_MC_COPY_YREG_U128_ZX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                     (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        else
            IEM_MC_COPY_YREG_U256_ZX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                     (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory (load).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* NB: effective address is calculated before decoding is completed. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        /* Plain fetch (not the *_ALIGN_SSE variant): movups permits unaligned operands. */
        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory (load).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_AVX_PREFIX_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
127
/** Opcode VEX.66.0F 0x10 - vmovupd Vpd, Wpd */
/** @todo Not yet implemented; the no-prefix variant iemOp_vmovups_Vps_Wps (VEX.0F 0x10)
 *        shows the intended decoding shape for this instruction. */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
130
131
132/** Opcode VEX 0xf3 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
133/**
134 * @ opcode 0x10
135 * @ oppfx 0xf3
136 * @ opcpuid sse
137 * @ opgroup og_sse_simdfp_datamove
138 * @ opxcpttype 5
139 * @ optest op1=1 op2=2 -> op1=2
140 * @ optest op1=0 op2=-22 -> op1=-22
141 * @ oponly
142 */
143FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
144//FNIEMOP_DEF(iemOp_movss_Vss_Wss)
145//{
146// I E M O P _ M N E M O N I C 2(RM, VMOVSS, vmovss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
147// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
148// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
149// {
150// /*
151// * Register, register.
152// */
153// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
154// IEM_MC_BEGIN(0, 1);
155// IEM_MC_LOCAL(uint32_t, uSrc);
156//
157// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
158// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
159// IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
160// IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
161//
162// IEM_MC_ADVANCE_RIP();
163// IEM_MC_END();
164// }
165// else
166// {
167// /*
168// * Memory, register.
169// */
170// IEM_MC_BEGIN(0, 2);
171// IEM_MC_LOCAL(uint32_t, uSrc);
172// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
173//
174// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
175// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
176// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
177// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
178//
179// IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
180// IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
181//
182// IEM_MC_ADVANCE_RIP();
183// IEM_MC_END();
184// }
185// return VINF_SUCCESS;
186//}
187
188/** Opcode VEX.F2.0F 0x10 - vmovsd Vx, Hx, Wsd */
189FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
190
191
192/**
193 * @ opcode 0x11
194 * @ oppfx none
195 * @ opcpuid sse
196 * @ opgroup og_sse_simdfp_datamove
197 * @ opxcpttype 4UA
198 * @ optest op1=1 op2=2 -> op1=2
199 * @ optest op1=0 op2=-42 -> op1=-42
200 */
201FNIEMOP_STUB(iemOp_vmovups_Wps_Vps);
202//FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
203//{
204// IEMOP_MNEMONIC2(MR, VMOVUPS, vmovups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
205// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
206// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
207// {
208// /*
209// * Register, register.
210// */
211// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
212// IEM_MC_BEGIN(0, 0);
213// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
214// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
215// IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
216// ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
217// IEM_MC_ADVANCE_RIP();
218// IEM_MC_END();
219// }
220// else
221// {
222// /*
223// * Memory, register.
224// */
225// IEM_MC_BEGIN(0, 2);
226// IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
227// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
228//
229// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
230// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
231// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
232// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
233//
234// IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
235// IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
236//
237// IEM_MC_ADVANCE_RIP();
238// IEM_MC_END();
239// }
240// return VINF_SUCCESS;
241//}
242
243
244/**
245 * @ opcode 0x11
246 * @ oppfx 0x66
247 * @ opcpuid sse2
248 * @ opgroup og_sse2_pcksclr_datamove
249 * @ opxcpttype 4UA
250 * @ optest op1=1 op2=2 -> op1=2
251 * @ optest op1=0 op2=-42 -> op1=-42
252 */
253FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
254//FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
255//{
256// IEMOP_MNEMONIC2(MR, VMOVUPD, vmovupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
257// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
258// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
259// {
260// /*
261// * Register, register.
262// */
263// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
264// IEM_MC_BEGIN(0, 0);
265// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
266// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
267// IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
268// ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
269// IEM_MC_ADVANCE_RIP();
270// IEM_MC_END();
271// }
272// else
273// {
274// /*
275// * Memory, register.
276// */
277// IEM_MC_BEGIN(0, 2);
278// IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
279// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
280//
281// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
282// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
283// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
284// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
285//
286// IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
287// IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
288//
289// IEM_MC_ADVANCE_RIP();
290// IEM_MC_END();
291// }
292// return VINF_SUCCESS;
293//}
294
295
296/**
297 * @ opcode 0x11
298 * @ oppfx 0xf3
299 * @ opcpuid sse
300 * @ opgroup og_sse_simdfp_datamove
301 * @ opxcpttype 5
302 * @ optest op1=1 op2=2 -> op1=2
303 * @ optest op1=0 op2=-22 -> op1=-22
304 */
305FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
306//FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss)
307//{
308// IEMOP_MNEMONIC2(MR, VMOVSS, vmovss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
309// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
310// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
311// {
312// /*
313// * Register, register.
314// */
315// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
316// IEM_MC_BEGIN(0, 1);
317// IEM_MC_LOCAL(uint32_t, uSrc);
318//
319// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
320// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
321// IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
322// IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
323//
324// IEM_MC_ADVANCE_RIP();
325// IEM_MC_END();
326// }
327// else
328// {
329// /*
330// * Memory, register.
331// */
332// IEM_MC_BEGIN(0, 2);
333// IEM_MC_LOCAL(uint32_t, uSrc);
334// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
335//
336// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
337// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
338// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
339// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
340//
341// IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
342// IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
343//
344// IEM_MC_ADVANCE_RIP();
345// IEM_MC_END();
346// }
347// return VINF_SUCCESS;
348//}
349
350
351/**
352 * @ opcode 0x11
353 * @ oppfx 0xf2
354 * @ opcpuid sse2
355 * @ opgroup og_sse2_pcksclr_datamove
356 * @ opxcpttype 5
357 * @ optest op1=1 op2=2 -> op1=2
358 * @ optest op1=0 op2=-42 -> op1=-42
359 */
360FNIEMOP_STUB(iemOp_vmovsd_Wsd_Hx_Vsd);
361//FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
362//{
363// IEMOP_MNEMONIC2(MR, VMOVSD, vmovsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
364// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
365// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
366// {
367// /*
368// * Register, register.
369// */
370// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
371// IEM_MC_BEGIN(0, 1);
372// IEM_MC_LOCAL(uint64_t, uSrc);
373//
374// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
375// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
376// IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
377// IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
378//
379// IEM_MC_ADVANCE_RIP();
380// IEM_MC_END();
381// }
382// else
383// {
384// /*
385// * Memory, register.
386// */
387// IEM_MC_BEGIN(0, 2);
388// IEM_MC_LOCAL(uint64_t, uSrc);
389// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
390//
391// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
392// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
393// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
394// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
395//
396// IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
397// IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
398//
399// IEM_MC_ADVANCE_RIP();
400// IEM_MC_END();
401// }
402// return VINF_SUCCESS;
403//}
404
405
406FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps);
407//FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
408//{
409// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
410// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
411// {
412// /**
413// * @ opcode 0x12
414// * @ opcodesub 11 mr/reg
415// * @ oppfx none
416// * @ opcpuid sse
417// * @ opgroup og_sse_simdfp_datamove
418// * @ opxcpttype 5
419// * @ optest op1=1 op2=2 -> op1=2
420// * @ optest op1=0 op2=-42 -> op1=-42
421// */
422// IEMOP_MNEMONIC2(RM_REG, VMOVHLPS, vmovhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
423//
424// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
425// IEM_MC_BEGIN(0, 1);
426// IEM_MC_LOCAL(uint64_t, uSrc);
427//
428// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
429// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
430// IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
431// IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
432//
433// IEM_MC_ADVANCE_RIP();
434// IEM_MC_END();
435// }
436// else
437// {
438// /**
439// * @ opdone
440// * @ opcode 0x12
441// * @ opcodesub !11 mr/reg
442// * @ oppfx none
443// * @ opcpuid sse
444// * @ opgroup og_sse_simdfp_datamove
445// * @ opxcpttype 5
446// * @ optest op1=1 op2=2 -> op1=2
447// * @ optest op1=0 op2=-42 -> op1=-42
448// * @ opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
449// */
450// IEMOP_MNEMONIC2(RM_MEM, VMOVLPS, vmovlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
451//
452// IEM_MC_BEGIN(0, 2);
453// IEM_MC_LOCAL(uint64_t, uSrc);
454// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
455//
456// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
457// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
458// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
459// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
460//
461// IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
462// IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
463//
464// IEM_MC_ADVANCE_RIP();
465// IEM_MC_END();
466// }
467// return VINF_SUCCESS;
468//}
469
470
471/**
472 * @ opcode 0x12
473 * @ opcodesub !11 mr/reg
474 * @ oppfx 0x66
475 * @ opcpuid sse2
476 * @ opgroup og_sse2_pcksclr_datamove
477 * @ opxcpttype 5
478 * @ optest op1=1 op2=2 -> op1=2
479 * @ optest op1=0 op2=-42 -> op1=-42
480 */
481FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq);
482//FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
483//{
484// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
485// if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
486// {
487// IEMOP_MNEMONIC2(RM_MEM, VMOVLPD, vmovlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
488//
489// IEM_MC_BEGIN(0, 2);
490// IEM_MC_LOCAL(uint64_t, uSrc);
491// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
492//
493// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
494// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
495// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
496// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
497//
498// IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
499// IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
500//
501// IEM_MC_ADVANCE_RIP();
502// IEM_MC_END();
503// return VINF_SUCCESS;
504// }
505//
506// /**
507// * @ opdone
508// * @ opmnemonic ud660f12m3
509// * @ opcode 0x12
510// * @ opcodesub 11 mr/reg
511// * @ oppfx 0x66
512// * @ opunused immediate
513// * @ opcpuid sse
514// * @ optest ->
515// */
516// return IEMOP_RAISE_INVALID_OPCODE();
517//}
518
519
520/**
521 * @ opcode 0x12
522 * @ oppfx 0xf3
523 * @ opcpuid sse3
524 * @ opgroup og_sse3_pcksclr_datamove
525 * @ opxcpttype 4
526 * @ optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
527 * op1=0x00000002000000020000000100000001
528 */
529FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx);
530//FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
531//{
532// IEMOP_MNEMONIC2(RM, VMOVSLDUP, vmovsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
533// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
534// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
535// {
536// /*
537// * Register, register.
538// */
539// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
540// IEM_MC_BEGIN(2, 0);
541// IEM_MC_ARG(PRTUINT128U, puDst, 0);
542// IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
543//
544// IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
545// IEM_MC_PREPARE_SSE_USAGE();
546//
547// IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
548// IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
549// IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
550//
551// IEM_MC_ADVANCE_RIP();
552// IEM_MC_END();
553// }
554// else
555// {
556// /*
557// * Register, memory.
558// */
559// IEM_MC_BEGIN(2, 2);
560// IEM_MC_LOCAL(RTUINT128U, uSrc);
561// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
562// IEM_MC_ARG(PRTUINT128U, puDst, 0);
563// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
564//
565// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
566// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
567// IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
568// IEM_MC_PREPARE_SSE_USAGE();
569//
570// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
571// IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
572// IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
573//
574// IEM_MC_ADVANCE_RIP();
575// IEM_MC_END();
576// }
577// return VINF_SUCCESS;
578//}
579
580
581/**
582 * @ opcode 0x12
583 * @ oppfx 0xf2
584 * @ opcpuid sse3
585 * @ opgroup og_sse3_pcksclr_datamove
586 * @ opxcpttype 5
587 * @ optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
588 * op1=0x22222222111111112222222211111111
589 */
590FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx);
591//FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
592//{
593// IEMOP_MNEMONIC2(RM, VMOVDDUP, vmovddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
594// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
595// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
596// {
597// /*
598// * Register, register.
599// */
600// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
601// IEM_MC_BEGIN(2, 0);
602// IEM_MC_ARG(PRTUINT128U, puDst, 0);
603// IEM_MC_ARG(uint64_t, uSrc, 1);
604//
605// IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
606// IEM_MC_PREPARE_SSE_USAGE();
607//
608// IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
609// IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
610// IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
611//
612// IEM_MC_ADVANCE_RIP();
613// IEM_MC_END();
614// }
615// else
616// {
617// /*
618// * Register, memory.
619// */
620// IEM_MC_BEGIN(2, 2);
621// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
622// IEM_MC_ARG(PRTUINT128U, puDst, 0);
623// IEM_MC_ARG(uint64_t, uSrc, 1);
624//
625// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
626// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
627// IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT();
628// IEM_MC_PREPARE_SSE_USAGE();
629//
630// IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
631// IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
632// IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
633//
634// IEM_MC_ADVANCE_RIP();
635// IEM_MC_END();
636// }
637// return VINF_SUCCESS;
638//}
639
640
641/** Opcode VEX.0F 0x13 - vmovlps Mq, Vq */
642FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
643
644/** Opcode VEX.66.0F 0x13 - vmovlpd Mq, Vq */
645FNIEMOP_STUB(iemOp_vmovlpd_Mq_Vq);
646//FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
647//{
648// IEMOP_MNEMONIC(vmovlpd_Mq_Vq, "movlpd Mq,Vq");
649// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
650// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
651// {
652//#if 0
653// /*
654// * Register, register.
655// */
656// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
657// IEM_MC_BEGIN(0, 1);
658// IEM_MC_LOCAL(uint64_t, uSrc);
659// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
660// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
661// IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
662// IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
663// IEM_MC_ADVANCE_RIP();
664// IEM_MC_END();
665//#else
666// return IEMOP_RAISE_INVALID_OPCODE();
667//#endif
668// }
669// else
670// {
671// /*
672// * Memory, register.
673// */
674// IEM_MC_BEGIN(0, 2);
675// IEM_MC_LOCAL(uint64_t, uSrc);
676// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
677//
678// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
679// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
680// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
681// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
682//
683// IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
684// IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
685//
686// IEM_MC_ADVANCE_RIP();
687// IEM_MC_END();
688// }
689// return VINF_SUCCESS;
690//}
691
692/* Opcode VEX.F3.0F 0x13 - invalid */
693/* Opcode VEX.F2.0F 0x13 - invalid */
694
695/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
696FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
697/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
698FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
699/* Opcode VEX.F3.0F 0x14 - invalid */
700/* Opcode VEX.F2.0F 0x14 - invalid */
701/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
702FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
703/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
704FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
705/* Opcode VEX.F3.0F 0x15 - invalid */
706/* Opcode VEX.F2.0F 0x15 - invalid */
707/** Opcode VEX.0F 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
708FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
709/** Opcode VEX.66.0F 0x16 - vmovhpdv1 Vdq, Hq, Mq */
710FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
711/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
712FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
713/* Opcode VEX.F2.0F 0x16 - invalid */
714/** Opcode VEX.0F 0x17 - vmovhpsv1 Mq, Vq */
715FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
716/** Opcode VEX.66.0F 0x17 - vmovhpdv1 Mq, Vq */
717FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
718/* Opcode VEX.F3.0F 0x17 - invalid */
719/* Opcode VEX.F2.0F 0x17 - invalid */
720
721
722/* Opcode VEX.0F 0x18 - invalid */
723/* Opcode VEX.0F 0x19 - invalid */
724/* Opcode VEX.0F 0x1a - invalid */
725/* Opcode VEX.0F 0x1b - invalid */
726/* Opcode VEX.0F 0x1c - invalid */
727/* Opcode VEX.0F 0x1d - invalid */
728/* Opcode VEX.0F 0x1e - invalid */
729/* Opcode VEX.0F 0x1f - invalid */
730
731/* Opcode VEX.0F 0x20 - invalid */
732/* Opcode VEX.0F 0x21 - invalid */
733/* Opcode VEX.0F 0x22 - invalid */
734/* Opcode VEX.0F 0x23 - invalid */
735/* Opcode VEX.0F 0x24 - invalid */
736/* Opcode VEX.0F 0x25 - invalid */
737/* Opcode VEX.0F 0x26 - invalid */
738/* Opcode VEX.0F 0x27 - invalid */
739
740/** Opcode VEX.0F 0x28 - vmovaps Vps, Wps */
741FNIEMOP_STUB(iemOp_vmovaps_Vps_Wps);
742//FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
743//{
744// IEMOP_MNEMONIC(vmovaps_Vps_Wps, "vmovaps Vps,Wps");
745// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
746// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
747// {
748// /*
749// * Register, register.
750// */
751// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
752// IEM_MC_BEGIN(0, 0);
753// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
754// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
755// IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
756// (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
757// IEM_MC_ADVANCE_RIP();
758// IEM_MC_END();
759// }
760// else
761// {
762// /*
763// * Register, memory.
764// */
765// IEM_MC_BEGIN(0, 2);
766// IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
767// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
768//
769// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
770// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
771// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
772// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
773//
774// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
775// IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
776//
777// IEM_MC_ADVANCE_RIP();
778// IEM_MC_END();
779// }
780// return VINF_SUCCESS;
781//}
782
783/** Opcode VEX.66.0F 0x28 - vmovapd Vpd, Wpd */
784FNIEMOP_STUB(iemOp_vmovapd_Vpd_Wpd);
785//FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
786//{
787// IEMOP_MNEMONIC(vmovapd_Wpd_Wpd, "vmovapd Wpd,Wpd");
788// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
789// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
790// {
791// /*
792// * Register, register.
793// */
794// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
795// IEM_MC_BEGIN(0, 0);
796// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
797// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
798// IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
799// (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
800// IEM_MC_ADVANCE_RIP();
801// IEM_MC_END();
802// }
803// else
804// {
805// /*
806// * Register, memory.
807// */
808// IEM_MC_BEGIN(0, 2);
809// IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
810// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
811//
812// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
813// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
814// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
815// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
816//
817// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
818// IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
819//
820// IEM_MC_ADVANCE_RIP();
821// IEM_MC_END();
822// }
823// return VINF_SUCCESS;
824//}
825
826/* Opcode VEX.F3.0F 0x28 - invalid */
827/* Opcode VEX.F2.0F 0x28 - invalid */
828
829/** Opcode VEX.0F 0x29 - vmovaps Wps, Vps */
830FNIEMOP_STUB(iemOp_vmovaps_Wps_Vps);
831//FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
832//{
833// IEMOP_MNEMONIC(vmovaps_Wps_Vps, "vmovaps Wps,Vps");
834// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
835// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
836// {
837// /*
838// * Register, register.
839// */
840// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
841// IEM_MC_BEGIN(0, 0);
842// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
843// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
844// IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
845// ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
846// IEM_MC_ADVANCE_RIP();
847// IEM_MC_END();
848// }
849// else
850// {
851// /*
852// * Memory, register.
853// */
854// IEM_MC_BEGIN(0, 2);
855// IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
856// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
857//
858// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
859// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
860// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
861// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
862//
863// IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
864// IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
865//
866// IEM_MC_ADVANCE_RIP();
867// IEM_MC_END();
868// }
869// return VINF_SUCCESS;
870//}
871
872/** Opcode VEX.66.0F 0x29 - vmovapd Wpd,Vpd */
873FNIEMOP_STUB(iemOp_vmovapd_Wpd_Vpd);
874//FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
875//{
876// IEMOP_MNEMONIC(vmovapd_Wpd_Vpd, "movapd Wpd,Vpd");
877// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
878// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
879// {
880// /*
881// * Register, register.
882// */
883// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
884// IEM_MC_BEGIN(0, 0);
885// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
886// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
887// IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
888// ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
889// IEM_MC_ADVANCE_RIP();
890// IEM_MC_END();
891// }
892// else
893// {
894// /*
895// * Memory, register.
896// */
897// IEM_MC_BEGIN(0, 2);
898// IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
899// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
900//
901// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
902// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
903// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
904// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
905//
906// IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
907// IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
908//
909// IEM_MC_ADVANCE_RIP();
910// IEM_MC_END();
911// }
912// return VINF_SUCCESS;
913//}
914
915/* Opcode VEX.F3.0F 0x29 - invalid */
916/* Opcode VEX.F2.0F 0x29 - invalid */
917
918
919/** Opcode VEX.0F 0x2a - invalid */
920/** Opcode VEX.66.0F 0x2a - invalid */
921/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
922FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
923/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
924FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
925
926
927/** Opcode VEX.0F 0x2b - vmovntps Mps, Vps */
928FNIEMOP_STUB(iemOp_vmovntps_Mps_Vps);
929//FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
930//{
931// IEMOP_MNEMONIC(vmovntps_Mps_Vps, "movntps Mps,Vps");
932// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
933// if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
934// {
935// /*
936// * memory, register.
937// */
938// IEM_MC_BEGIN(0, 2);
939// IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
940// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
941//
942// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
943// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
944// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
945// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
946//
947// IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
948// IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
949//
950// IEM_MC_ADVANCE_RIP();
951// IEM_MC_END();
952// }
953// /* The register, register encoding is invalid. */
954// else
955// return IEMOP_RAISE_INVALID_OPCODE();
956// return VINF_SUCCESS;
957//}
958
959/** Opcode VEX.66.0F 0x2b - vmovntpd Mpd, Vpd */
960FNIEMOP_STUB(iemOp_vmovntpd_Mpd_Vpd);
961//FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
962//{
963// IEMOP_MNEMONIC(vmovntpd_Mpd_Vpd, "movntpd Mdq,Vpd");
964// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
965// if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
966// {
967// /*
968// * memory, register.
969// */
970// IEM_MC_BEGIN(0, 2);
971// IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
972// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
973//
974// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
975// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
976// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
977// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
978//
979// IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
980// IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
981//
982// IEM_MC_ADVANCE_RIP();
983// IEM_MC_END();
984// }
985// /* The register, register encoding is invalid. */
986// else
987// return IEMOP_RAISE_INVALID_OPCODE();
988// return VINF_SUCCESS;
989//}
990/* Opcode VEX.F3.0F 0x2b - invalid */
991/* Opcode VEX.F2.0F 0x2b - invalid */
992
993
994/* Opcode VEX.0F 0x2c - invalid */
995/* Opcode VEX.66.0F 0x2c - invalid */
996/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
997FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
998/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
999FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
1000
1001/* Opcode VEX.0F 0x2d - invalid */
1002/* Opcode VEX.66.0F 0x2d - invalid */
1003/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
1004FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
1005/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
1006FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
1007
1008/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
1009FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss);
1010/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
1011FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd);
1012/* Opcode VEX.F3.0F 0x2e - invalid */
1013/* Opcode VEX.F2.0F 0x2e - invalid */
1014
1015/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
1016FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
1017/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
1018FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
1019/* Opcode VEX.F3.0F 0x2f - invalid */
1020/* Opcode VEX.F2.0F 0x2f - invalid */
1021
1022/* Opcode VEX.0F 0x30 - invalid */
1023/* Opcode VEX.0F 0x31 - invalid */
1024/* Opcode VEX.0F 0x32 - invalid */
1025/* Opcode VEX.0F 0x33 - invalid */
1026/* Opcode VEX.0F 0x34 - invalid */
1027/* Opcode VEX.0F 0x35 - invalid */
1028/* Opcode VEX.0F 0x36 - invalid */
1029/* Opcode VEX.0F 0x37 - invalid */
1030/* Opcode VEX.0F 0x38 - invalid */
1031/* Opcode VEX.0F 0x39 - invalid */
1032/* Opcode VEX.0F 0x3a - invalid */
1033/* Opcode VEX.0F 0x3b - invalid */
1034/* Opcode VEX.0F 0x3c - invalid */
1035/* Opcode VEX.0F 0x3d - invalid */
1036/* Opcode VEX.0F 0x3e - invalid */
1037/* Opcode VEX.0F 0x3f - invalid */
1038/* Opcode VEX.0F 0x40 - invalid */
1039/* Opcode VEX.0F 0x41 - invalid */
1040/* Opcode VEX.0F 0x42 - invalid */
1041/* Opcode VEX.0F 0x43 - invalid */
1042/* Opcode VEX.0F 0x44 - invalid */
1043/* Opcode VEX.0F 0x45 - invalid */
1044/* Opcode VEX.0F 0x46 - invalid */
1045/* Opcode VEX.0F 0x47 - invalid */
1046/* Opcode VEX.0F 0x48 - invalid */
1047/* Opcode VEX.0F 0x49 - invalid */
1048/* Opcode VEX.0F 0x4a - invalid */
1049/* Opcode VEX.0F 0x4b - invalid */
1050/* Opcode VEX.0F 0x4c - invalid */
1051/* Opcode VEX.0F 0x4d - invalid */
1052/* Opcode VEX.0F 0x4e - invalid */
1053/* Opcode VEX.0F 0x4f - invalid */
1054
1055/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
1056FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
1057/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
1058FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
1059/* Opcode VEX.F3.0F 0x50 - invalid */
1060/* Opcode VEX.F2.0F 0x50 - invalid */
1061
1062/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
1063FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
1064/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
1065FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
1066/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
1067FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
1068/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
1069FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
1070
1071/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
1072FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
1073/* Opcode VEX.66.0F 0x52 - invalid */
1074/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
1075FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
1076/* Opcode VEX.F2.0F 0x52 - invalid */
1077
1078/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
1079FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
1080/* Opcode VEX.66.0F 0x53 - invalid */
1081/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
1082FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
1083/* Opcode VEX.F2.0F 0x53 - invalid */
1084
1085/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
1086FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
1087/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
1088FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
1089/* Opcode VEX.F3.0F 0x54 - invalid */
1090/* Opcode VEX.F2.0F 0x54 - invalid */
1091
1092/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
1093FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
1094/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
1095FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
1096/* Opcode VEX.F3.0F 0x55 - invalid */
1097/* Opcode VEX.F2.0F 0x55 - invalid */
1098
1099/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
1100FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
1101/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
1102FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
1103/* Opcode VEX.F3.0F 0x56 - invalid */
1104/* Opcode VEX.F2.0F 0x56 - invalid */
1105
1106/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
1107FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
1108/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
1109FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
1110/* Opcode VEX.F3.0F 0x57 - invalid */
1111/* Opcode VEX.F2.0F 0x57 - invalid */
1112
1113/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
1114FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
1115/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
1116FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
1117/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
1118FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
1119/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
1120FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
1121
1122/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
1123FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
1124/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
1125FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
1126/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
1127FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
1128/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
1129FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
1130
1131/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
1132FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
1133/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
1134FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
1135/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
1136FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
1137/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
1138FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
1139
1140/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
1141FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
1142/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
1143FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
1144/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
1145FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
1146/* Opcode VEX.F2.0F 0x5b - invalid */
1147
1148/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
1149FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
1150/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
1151FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
1152/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
1153FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
1154/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
1155FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
1156
1157/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
1158FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
1159/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
1160FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
1161/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
1162FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
1163/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
1164FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
1165
1166/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
1167FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
1168/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
1169FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
1170/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
1171FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
1172/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
1173FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
1174
1175/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
1176FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
1177/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
1178FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
1179/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
1180FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
1181/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
1182FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
1183
1184
1185///**
1186// * Common worker for SSE2 instructions on the forms:
1187// * pxxxx xmm1, xmm2/mem128
1188// *
1189// * The 2nd operand is the first half of a register, which in the memory case
1190// * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
1191// * memory accessed for MMX.
1192// *
1193// * Exceptions type 4.
1194// */
1195//FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
1196//{
1197// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1198// if (!pImpl->pfnU64)
1199// return IEMOP_RAISE_INVALID_OPCODE();
1200// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1201// {
1202// /*
1203// * Register, register.
1204// */
1205// /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
1206// /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
1207// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1208// IEM_MC_BEGIN(2, 0);
1209// IEM_MC_ARG(uint64_t *, pDst, 0);
1210// IEM_MC_ARG(uint32_t const *, pSrc, 1);
1211// IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
1212// IEM_MC_PREPARE_FPU_USAGE();
1213// IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
1214// IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
1215// IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
1216// IEM_MC_ADVANCE_RIP();
1217// IEM_MC_END();
1218// }
1219// else
1220// {
1221// /*
1222// * Register, memory.
1223// */
1224// IEM_MC_BEGIN(2, 2);
1225// IEM_MC_ARG(uint64_t *, pDst, 0);
1226// IEM_MC_LOCAL(uint32_t, uSrc);
1227// IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
1228// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1229//
1230// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1231// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1232// IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
1233// IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1234//
1235// IEM_MC_PREPARE_FPU_USAGE();
1236// IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
1237// IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
1238//
1239// IEM_MC_ADVANCE_RIP();
1240// IEM_MC_END();
1241// }
1242// return VINF_SUCCESS;
1243//}
1244
1245
1246/* Opcode VEX.0F 0x60 - invalid */
1247
/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpunpcklbw_Vx_Hx_Wx);
1250//FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
1251//{
1252// IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
1253// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
1254//}
1255
1256/* Opcode VEX.F3.0F 0x60 - invalid */
1257
1258
1259/* Opcode VEX.0F 0x61 - invalid */
1260
1261/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
1262FNIEMOP_STUB(iemOp_vpunpcklwd_Vx_Hx_Wx);
1263//FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
1264//{
1265// IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
1266// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
1267//}
1268
1269/* Opcode VEX.F3.0F 0x61 - invalid */
1270
1271
1272/* Opcode VEX.0F 0x62 - invalid */
1273
1274/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
1275FNIEMOP_STUB(iemOp_vpunpckldq_Vx_Hx_Wx);
1276//FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
1277//{
1278// IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
1279// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
1280//}
1281
1282/* Opcode VEX.F3.0F 0x62 - invalid */
1283
1284
1285
1286/* Opcode VEX.0F 0x63 - invalid */
1287/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
1288FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
1289/* Opcode VEX.F3.0F 0x63 - invalid */
1290
1291/* Opcode VEX.0F 0x64 - invalid */
1292/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
1293FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
1294/* Opcode VEX.F3.0F 0x64 - invalid */
1295
1296/* Opcode VEX.0F 0x65 - invalid */
1297/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
1298FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
1299/* Opcode VEX.F3.0F 0x65 - invalid */
1300
1301/* Opcode VEX.0F 0x66 - invalid */
1302/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
1303FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
1304/* Opcode VEX.F3.0F 0x66 - invalid */
1305
1306/* Opcode VEX.0F 0x67 - invalid */
/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
1309/* Opcode VEX.F3.0F 0x67 - invalid */
1310
1311
1312///**
1313// * Common worker for SSE2 instructions on the form:
1314// * pxxxx xmm1, xmm2/mem128
1315// *
1316// * The 2nd operand is the second half of a register, which in the memory case
1317// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
1318// * where it may read the full 128 bits or only the upper 64 bits.
1319// *
1320// * Exceptions type 4.
1321// */
1322//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
1323//{
1324// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1325// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1326// {
1327// /*
1328// * Register, register.
1329// */
1330// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1331// IEM_MC_BEGIN(2, 0);
1332// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1333// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
1334// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1335// IEM_MC_PREPARE_SSE_USAGE();
1336// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1337// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1338// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
1339// IEM_MC_ADVANCE_RIP();
1340// IEM_MC_END();
1341// }
1342// else
1343// {
1344// /*
1345// * Register, memory.
1346// */
1347// IEM_MC_BEGIN(2, 2);
1348// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1349// IEM_MC_LOCAL(RTUINT128U, uSrc);
1350// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
1351// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1352//
1353// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1354// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1355// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
1357//
1358// IEM_MC_PREPARE_SSE_USAGE();
1359// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1360// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
1361//
1362// IEM_MC_ADVANCE_RIP();
1363// IEM_MC_END();
1364// }
1365// return VINF_SUCCESS;
1366//}
1367
1368
1369/* Opcode VEX.0F 0x68 - invalid */
1370
1371/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
1372FNIEMOP_STUB(iemOp_vpunpckhbw_Vx_Hx_Wx);
1373//FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
1374//{
1375// IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
1376// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
1377//}
1378/* Opcode VEX.F3.0F 0x68 - invalid */
1379
1380
1381/* Opcode VEX.0F 0x69 - invalid */
1382
1383/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
1384FNIEMOP_STUB(iemOp_vpunpckhwd_Vx_Hx_Wx);
1385//FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
1386//{
1387// IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
1388// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
1389//
1390//}
1391/* Opcode VEX.F3.0F 0x69 - invalid */
1392
1393
1394/* Opcode VEX.0F 0x6a - invalid */
1395
/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpunpckhdq_Vx_Hx_W);
1398//FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
1399//{
1400// IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
1401// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
1402//}
1403/* Opcode VEX.F3.0F 0x6a - invalid */
1404
1405
1406/* Opcode VEX.0F 0x6b - invalid */
1407/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
1408FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
1409/* Opcode VEX.F3.0F 0x6b - invalid */
1410
1411
1412/* Opcode VEX.0F 0x6c - invalid */
1413
1414/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
1415FNIEMOP_STUB(iemOp_vpunpcklqdq_Vx_Hx_Wx);
1416//FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
1417//{
1418// IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
1419// return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
1420//}
1421
1422/* Opcode VEX.F3.0F 0x6c - invalid */
1423/* Opcode VEX.F2.0F 0x6c - invalid */
1424
1425
1426/* Opcode VEX.0F 0x6d - invalid */
1427
/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpunpckhqdq_Vx_Hx_W);
1430//FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
1431//{
1432// IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
1433// return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
1434//}
1435
1436/* Opcode VEX.F3.0F 0x6d - invalid */
1437
1438
1439/* Opcode VEX.0F 0x6e - invalid */
1440
1441/** Opcode VEX.66.0F 0x6e - vmovd/q Vy, Ey */
1442FNIEMOP_STUB(iemOp_vmovd_q_Vy_Ey);
1443//FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
1444//{
1445// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1446// if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1447// IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
1448// else
1449// IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
1450// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1451// {
1452// /* XMM, greg*/
1453// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1454// IEM_MC_BEGIN(0, 1);
1455// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1456// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1457// if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1458// {
1459// IEM_MC_LOCAL(uint64_t, u64Tmp);
1460// IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1461// IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
1462// }
1463// else
1464// {
1465// IEM_MC_LOCAL(uint32_t, u32Tmp);
1466// IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1467// IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
1468// }
1469// IEM_MC_ADVANCE_RIP();
1470// IEM_MC_END();
1471// }
1472// else
1473// {
1474// /* XMM, [mem] */
1475// IEM_MC_BEGIN(0, 2);
1476// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1477// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
1478// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1479// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1480// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1481// if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1482// {
1483// IEM_MC_LOCAL(uint64_t, u64Tmp);
1484// IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1485// IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
1486// }
1487// else
1488// {
1489// IEM_MC_LOCAL(uint32_t, u32Tmp);
1490// IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1491// IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
1492// }
1493// IEM_MC_ADVANCE_RIP();
1494// IEM_MC_END();
1495// }
1496// return VINF_SUCCESS;
1497//}
1498
1499/* Opcode VEX.F3.0F 0x6e - invalid */
1500
1501
1502/* Opcode VEX.0F 0x6f - invalid */
1503
1504/** Opcode VEX.66.0F 0x6f - vmovdqa Vx, Wx */
1505FNIEMOP_STUB(iemOp_vmovdqa_Vx_Wx);
1506//FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
1507//{
1508// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1509// IEMOP_MNEMONIC(vmovdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
1510// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1511// {
1512// /*
1513// * Register, register.
1514// */
1515// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1516// IEM_MC_BEGIN(0, 0);
1517// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1518// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1519// IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1520// (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1521// IEM_MC_ADVANCE_RIP();
1522// IEM_MC_END();
1523// }
1524// else
1525// {
1526// /*
1527// * Register, memory.
1528// */
1529// IEM_MC_BEGIN(0, 2);
1530// IEM_MC_LOCAL(RTUINT128U, u128Tmp);
1531// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1532//
1533// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1534// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1535// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1536// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1537// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1538// IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
1539//
1540// IEM_MC_ADVANCE_RIP();
1541// IEM_MC_END();
1542// }
1543// return VINF_SUCCESS;
1544//}
1545
1546/** Opcode VEX.F3.0F 0x6f - vmovdqu Vx, Wx */
1547FNIEMOP_STUB(iemOp_vmovdqu_Vx_Wx);
1548//FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
1549//{
1550// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1551// IEMOP_MNEMONIC(vmovdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
1552// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1553// {
1554// /*
1555// * Register, register.
1556// */
1557// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1558// IEM_MC_BEGIN(0, 0);
1559// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1560// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1561// IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1562// (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1563// IEM_MC_ADVANCE_RIP();
1564// IEM_MC_END();
1565// }
1566// else
1567// {
1568// /*
1569// * Register, memory.
1570// */
1571// IEM_MC_BEGIN(0, 2);
1572// IEM_MC_LOCAL(RTUINT128U, u128Tmp);
1573// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1574//
1575// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1576// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1577// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1578// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1579// IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1580// IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
1581//
1582// IEM_MC_ADVANCE_RIP();
1583// IEM_MC_END();
1584// }
1585// return VINF_SUCCESS;
1586//}
1587
1588
1589/* Opcode VEX.0F 0x70 - invalid */
1590
1591/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
1592FNIEMOP_STUB(iemOp_vpshufd_Vx_Wx_Ib);
1593//FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
1594//{
1595// IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
1596// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1597// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1598// {
1599// /*
1600// * Register, register.
1601// */
1602// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
1603// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1604//
1605// IEM_MC_BEGIN(3, 0);
1606// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1607// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
1608// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
1609// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1610// IEM_MC_PREPARE_SSE_USAGE();
1611// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1612// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1613// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
1614// IEM_MC_ADVANCE_RIP();
1615// IEM_MC_END();
1616// }
1617// else
1618// {
1619// /*
1620// * Register, memory.
1621// */
1622// IEM_MC_BEGIN(3, 2);
1623// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1624// IEM_MC_LOCAL(RTUINT128U, uSrc);
1625// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
1626// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1627//
1628// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1629// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
1630// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
1631// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1632// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1633//
1634// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1635// IEM_MC_PREPARE_SSE_USAGE();
1636// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1637// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg);
1638//
1639// IEM_MC_ADVANCE_RIP();
1640// IEM_MC_END();
1641// }
1642// return VINF_SUCCESS;
1643//}
1644
1645/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
1646FNIEMOP_STUB(iemOp_vpshufhw_Vx_Wx_Ib);
1647//FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
1648//{
1649// IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
1650// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1651// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1652// {
1653// /*
1654// * Register, register.
1655// */
1656// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
1657// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1658//
1659// IEM_MC_BEGIN(3, 0);
1660// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1661// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
1662// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
1663// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1664// IEM_MC_PREPARE_SSE_USAGE();
1665// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1666// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1667// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
1668// IEM_MC_ADVANCE_RIP();
1669// IEM_MC_END();
1670// }
1671// else
1672// {
1673// /*
1674// * Register, memory.
1675// */
1676// IEM_MC_BEGIN(3, 2);
1677// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1678// IEM_MC_LOCAL(RTUINT128U, uSrc);
1679// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
1680// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1681//
1682// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1683// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
1684// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
1685// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1686// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1687//
1688// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1689// IEM_MC_PREPARE_SSE_USAGE();
1690// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1691// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg);
1692//
1693// IEM_MC_ADVANCE_RIP();
1694// IEM_MC_END();
1695// }
1696// return VINF_SUCCESS;
1697//}
1698
1699/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
1700FNIEMOP_STUB(iemOp_vpshuflw_Vx_Wx_Ib);
1701//FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
1702//{
1703// IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
1704// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1705// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1706// {
1707// /*
1708// * Register, register.
1709// */
1710// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
1711// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1712//
1713// IEM_MC_BEGIN(3, 0);
1714// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1715// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
1716// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
1717// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1718// IEM_MC_PREPARE_SSE_USAGE();
1719// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1720// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1721// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
1722// IEM_MC_ADVANCE_RIP();
1723// IEM_MC_END();
1724// }
1725// else
1726// {
1727// /*
1728// * Register, memory.
1729// */
1730// IEM_MC_BEGIN(3, 2);
1731// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1732// IEM_MC_LOCAL(RTUINT128U, uSrc);
1733// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
1734// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1735//
1736// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1737// uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
1738// IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
1739// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1740// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1741//
1742// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1743// IEM_MC_PREPARE_SSE_USAGE();
1744// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1745// IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg);
1746//
1747// IEM_MC_ADVANCE_RIP();
1748// IEM_MC_END();
1749// }
1750// return VINF_SUCCESS;
1751//}
1752
1753
1754/* Opcode VEX.0F 0x71 11/2 - invalid. */
1755/** Opcode VEX.66.0F 0x71 11/2. */
1756FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
1757
1758/* Opcode VEX.0F 0x71 11/4 - invalid */
1759/** Opcode VEX.66.0F 0x71 11/4. */
1760FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
1761
1762/* Opcode VEX.0F 0x71 11/6 - invalid */
1763/** Opcode VEX.66.0F 0x71 11/6. */
1764FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
1765
1766
/**
 * VEX Group 12 jump table for the register (MOD=3) variant.
 *
 * Indexed by modrm.reg * 4 + idxPrefix, i.e. four SIMD-prefix columns per
 * /reg row.  Only the second-column (VEX.66) /2, /4 and /6 encodings are
 * valid; every other slot decodes as invalid-with-imm8.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
1782
1783
/** Opcode VEX.0F 0x71 - Group 12 (vpsrlw/vpsraw/vpsllw Hx,Ux,Ib). */
FNIEMOP_DEF(iemOp_VGrp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register: dispatch on modrm.reg and the SIMD prefix. */
        return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                     + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms of group 12 are invalid; the imm8 must still be consumed
       for correct instruction-length decoding. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
1794
1795
1796/* Opcode VEX.0F 0x72 11/2 - invalid. */
1797/** Opcode VEX.66.0F 0x72 11/2. */
1798FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
1799
1800/* Opcode VEX.0F 0x72 11/4 - invalid. */
1801/** Opcode VEX.66.0F 0x72 11/4. */
1802FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
1803
1804/* Opcode VEX.0F 0x72 11/6 - invalid. */
1805/** Opcode VEX.66.0F 0x72 11/6. */
1806FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
1807
1808
/**
 * VEX Group 13 jump table for the register (MOD=3) variant.
 *
 * Indexed by modrm.reg * 4 + idxPrefix, i.e. four SIMD-prefix columns per
 * /reg row.  Only the second-column (VEX.66) /2, /4 and /6 encodings are
 * valid; every other slot decodes as invalid-with-imm8.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
1824
/** Opcode VEX.0F 0x72 - Group 13 (vpsrld/vpsrad/vpslld Hx,Ux,Ib). */
FNIEMOP_DEF(iemOp_VGrp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        /* register, register: dispatch on modrm.reg and the SIMD prefix. */
        return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                                     + pVCpu->iem.s.idxPrefix], bRm);
    /* Memory forms of group 13 are invalid; the imm8 must still be consumed
       for correct instruction-length decoding. */
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
1835
1836
/* Opcode VEX.0F 0x73 11/2 - invalid. */
/** Opcode VEX.66.0F 0x73 11/2. vpsrlq Hx,Ux,Ib - shift qwords right (logical). */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x73 11/3 - invalid. */
/** Opcode VEX.66.0F 0x73 11/3. vpsrldq Hx,Ux,Ib - byte-wise right shift of the whole register. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x73 11/6 - invalid. */
/** Opcode VEX.66.0F 0x73 11/6. vpsllq Hx,Ux,Ib - shift qwords left (logical). */
FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);

/* Opcode VEX.0F 0x73 11/7 - invalid. */
/** Opcode VEX.66.0F 0x73 11/7. vpslldq Hx,Ux,Ib - byte-wise left shift of the whole register. */
FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
1850
/**
 * Group 14 jump table for register variant.
 *
 * Indexed by ModR/M.reg * 4 + SIMD prefix index (none, 066h, 0f3h, 0f2h);
 * only the 066h (VEX.66.0F) column has valid encodings in this group.
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
{ /* pfx:  none,                      066h,                          0f3h,                       0f2h */
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
1866
1867
1868/** Opcode VEX.0F 0x73. */
1869FNIEMOP_DEF(iemOp_VGrp14)
1870{
1871 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1872 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1873 /* register, register */
1874 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
1875 + pVCpu->iem.s.idxPrefix], bRm);
1876 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
1877}
1878
1879
1880///**
1881// * Common worker for SSE2 instructions on the forms:
1882// * pxxx xmm1, xmm2/mem128
1883// *
1884// * Proper alignment of the 128-bit operand is enforced.
1885// * Exceptions type 4. SSE2 cpuid checks.
1886// */
1887//FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
1888//{
1889// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1890// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1891// {
1892// /*
1893// * Register, register.
1894// */
1895// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1896// IEM_MC_BEGIN(2, 0);
1897// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1898// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
1899// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1900// IEM_MC_PREPARE_SSE_USAGE();
1901// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1902// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1903// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
1904// IEM_MC_ADVANCE_RIP();
1905// IEM_MC_END();
1906// }
1907// else
1908// {
1909// /*
1910// * Register, memory.
1911// */
1912// IEM_MC_BEGIN(2, 2);
1913// IEM_MC_ARG(PRTUINT128U, pDst, 0);
1914// IEM_MC_LOCAL(RTUINT128U, uSrc);
1915// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
1916// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1917//
1918// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1919// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1920// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1921// IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1922//
1923// IEM_MC_PREPARE_SSE_USAGE();
1924// IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1925// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
1926//
1927// IEM_MC_ADVANCE_RIP();
1928// IEM_MC_END();
1929// }
1930// return VINF_SUCCESS;
1931//}
1932
1933
/* Opcode VEX.0F 0x74 - invalid */

/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpeqb_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
//    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
//}

/* Opcode VEX.F3.0F 0x74 - invalid */
/* Opcode VEX.F2.0F 0x74 - invalid */


/* Opcode VEX.0F 0x75 - invalid */

/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpeqw_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
//    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
//}

/* Opcode VEX.F3.0F 0x75 - invalid */
/* Opcode VEX.F2.0F 0x75 - invalid */


/* Opcode VEX.0F 0x76 - invalid */

/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpeqd_Vx_Hx_Wx);
//FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
//{
//    IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
//    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
//}

/* Opcode VEX.F3.0F 0x76 - invalid */
/* Opcode VEX.F2.0F 0x76 - invalid */


/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv
 * (NOTE(review): presumably VEX.L selects the variant: L=0 vzeroupper,
 *  L=1 vzeroall - confirm against the SDM when implementing.) */
FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
/* Opcode VEX.66.0F 0x77 - invalid */
/* Opcode VEX.F3.0F 0x77 - invalid */
/* Opcode VEX.F2.0F 0x77 - invalid */
1981
1982/* Opcode VEX.0F 0x78 - invalid */
1983/* Opcode VEX.66.0F 0x78 - invalid */
1984/* Opcode VEX.F3.0F 0x78 - invalid */
1985/* Opcode VEX.F2.0F 0x78 - invalid */
1986
1987/* Opcode VEX.0F 0x79 - invalid */
1988/* Opcode VEX.66.0F 0x79 - invalid */
1989/* Opcode VEX.F3.0F 0x79 - invalid */
1990/* Opcode VEX.F2.0F 0x79 - invalid */
1991
1992/* Opcode VEX.0F 0x7a - invalid */
1993/* Opcode VEX.66.0F 0x7a - invalid */
1994/* Opcode VEX.F3.0F 0x7a - invalid */
1995/* Opcode VEX.F2.0F 0x7a - invalid */
1996
1997/* Opcode VEX.0F 0x7b - invalid */
1998/* Opcode VEX.66.0F 0x7b - invalid */
1999/* Opcode VEX.F3.0F 0x7b - invalid */
2000/* Opcode VEX.F2.0F 0x7b - invalid */
2001
2002/* Opcode VEX.0F 0x7c - invalid */
2003/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
2004FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
2005/* Opcode VEX.F3.0F 0x7c - invalid */
2006/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
2007FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
2008
2009/* Opcode VEX.0F 0x7d - invalid */
2010/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
2011FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
2012/* Opcode VEX.F3.0F 0x7d - invalid */
2013/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
2014FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
2015
2016
2017/* Opcode VEX.0F 0x7e - invalid */
2018
2019/** Opcode VEX.66.0F 0x7e - vmovd_q Ey, Vy */
2020FNIEMOP_STUB(iemOp_vmovd_q_Ey_Vy);
2021//FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
2022//{
2023// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2024// if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2025// IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
2026// else
2027// IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
2028// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2029// {
2030// /* greg, XMM */
2031// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2032// IEM_MC_BEGIN(0, 1);
2033// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2034// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2035// if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2036// {
2037// IEM_MC_LOCAL(uint64_t, u64Tmp);
2038// IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2039// IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
2040// }
2041// else
2042// {
2043// IEM_MC_LOCAL(uint32_t, u32Tmp);
2044// IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2045// IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
2046// }
2047// IEM_MC_ADVANCE_RIP();
2048// IEM_MC_END();
2049// }
2050// else
2051// {
2052// /* [mem], XMM */
2053// IEM_MC_BEGIN(0, 2);
2054// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2055// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2056// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2057// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2058// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2059// if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2060// {
2061// IEM_MC_LOCAL(uint64_t, u64Tmp);
2062// IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2063// IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
2064// }
2065// else
2066// {
2067// IEM_MC_LOCAL(uint32_t, u32Tmp);
2068// IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2069// IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
2070// }
2071// IEM_MC_ADVANCE_RIP();
2072// IEM_MC_END();
2073// }
2074// return VINF_SUCCESS;
2075//}
2076
2077/** Opcode VEX.F3.0F 0x7e - vmovq Vq, Wq */
2078FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
2079/* Opcode VEX.F2.0F 0x7e - invalid */
2080
2081
2082/* Opcode VEX.0F 0x7f - invalid */
2083
2084/** Opcode VEX.66.0F 0x7f - vmovdqa Wx,Vx */
2085FNIEMOP_STUB(iemOp_vmovdqa_Wx_Vx);
2086//FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
2087//{
2088// IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
2089// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2090// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2091// {
2092// /*
2093// * Register, register.
2094// */
2095// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2096// IEM_MC_BEGIN(0, 0);
2097// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2098// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2099// IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2100// ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2101// IEM_MC_ADVANCE_RIP();
2102// IEM_MC_END();
2103// }
2104// else
2105// {
2106// /*
2107// * Register, memory.
2108// */
2109// IEM_MC_BEGIN(0, 2);
2110// IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2111// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2112//
2113// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2114// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2115// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2116// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2117//
2118// IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2119// IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
2120//
2121// IEM_MC_ADVANCE_RIP();
2122// IEM_MC_END();
2123// }
2124// return VINF_SUCCESS;
2125//}
2126
2127/** Opcode VEX.F3.0F 0x7f - vmovdqu Wx,Vx */
2128FNIEMOP_STUB(iemOp_vmovdqu_Wx_Vx);
2129//FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
2130//{
2131// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2132// IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
2133// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2134// {
2135// /*
2136// * Register, register.
2137// */
2138// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2139// IEM_MC_BEGIN(0, 0);
2140// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2141// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2142// IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
2143// ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2144// IEM_MC_ADVANCE_RIP();
2145// IEM_MC_END();
2146// }
2147// else
2148// {
2149// /*
2150// * Register, memory.
2151// */
2152// IEM_MC_BEGIN(0, 2);
2153// IEM_MC_LOCAL(RTUINT128U, u128Tmp);
2154// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2155//
2156// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2157// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2158// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2159// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2160//
2161// IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2162// IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
2163//
2164// IEM_MC_ADVANCE_RIP();
2165// IEM_MC_END();
2166// }
2167// return VINF_SUCCESS;
2168//}
2169
2170/* Opcode VEX.F2.0F 0x7f - invalid */
2171
2172
2173/* Opcode VEX.0F 0x80 - invalid */
2174/* Opcode VEX.0F 0x81 - invalid */
2175/* Opcode VEX.0F 0x82 - invalid */
2176/* Opcode VEX.0F 0x83 - invalid */
2177/* Opcode VEX.0F 0x84 - invalid */
2178/* Opcode VEX.0F 0x85 - invalid */
2179/* Opcode VEX.0F 0x86 - invalid */
2180/* Opcode VEX.0F 0x87 - invalid */
2181/* Opcode VEX.0F 0x88 - invalid */
2182/* Opcode VEX.0F 0x89 - invalid */
2183/* Opcode VEX.0F 0x8a - invalid */
2184/* Opcode VEX.0F 0x8b - invalid */
2185/* Opcode VEX.0F 0x8c - invalid */
2186/* Opcode VEX.0F 0x8d - invalid */
2187/* Opcode VEX.0F 0x8e - invalid */
2188/* Opcode VEX.0F 0x8f - invalid */
2189/* Opcode VEX.0F 0x90 - invalid */
2190/* Opcode VEX.0F 0x91 - invalid */
2191/* Opcode VEX.0F 0x92 - invalid */
2192/* Opcode VEX.0F 0x93 - invalid */
2193/* Opcode VEX.0F 0x94 - invalid */
2194/* Opcode VEX.0F 0x95 - invalid */
2195/* Opcode VEX.0F 0x96 - invalid */
2196/* Opcode VEX.0F 0x97 - invalid */
2197/* Opcode VEX.0F 0x98 - invalid */
2198/* Opcode VEX.0F 0x99 - invalid */
2199/* Opcode VEX.0F 0x9a - invalid */
2200/* Opcode VEX.0F 0x9b - invalid */
2201/* Opcode VEX.0F 0x9c - invalid */
2202/* Opcode VEX.0F 0x9d - invalid */
2203/* Opcode VEX.0F 0x9e - invalid */
2204/* Opcode VEX.0F 0x9f - invalid */
2205/* Opcode VEX.0F 0xa0 - invalid */
2206/* Opcode VEX.0F 0xa1 - invalid */
2207/* Opcode VEX.0F 0xa2 - invalid */
2208/* Opcode VEX.0F 0xa3 - invalid */
2209/* Opcode VEX.0F 0xa4 - invalid */
2210/* Opcode VEX.0F 0xa5 - invalid */
2211/* Opcode VEX.0F 0xa6 - invalid */
2212/* Opcode VEX.0F 0xa7 - invalid */
2213/* Opcode VEX.0F 0xa8 - invalid */
2214/* Opcode VEX.0F 0xa9 - invalid */
2215/* Opcode VEX.0F 0xaa - invalid */
2216/* Opcode VEX.0F 0xab - invalid */
2217/* Opcode VEX.0F 0xac - invalid */
2218/* Opcode VEX.0F 0xad - invalid */
2219
2220
2221/* Opcode VEX.0F 0xae mem/0 - invalid. */
2222/* Opcode VEX.0F 0xae mem/1 - invalid. */
2223
2224/**
2225 * @ opmaps grp15
2226 * @ opcode !11/2
2227 * @ oppfx none
2228 * @ opcpuid sse
2229 * @ opgroup og_sse_mxcsrsm
2230 * @ opxcpttype 5
2231 * @ optest op1=0 -> mxcsr=0
2232 * @ optest op1=0x2083 -> mxcsr=0x2083
2233 * @ optest op1=0xfffffffe -> value.xcpt=0xd
2234 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
2235 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
2236 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
2237 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
2238 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
2239 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
2240 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
2241 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
 *
 * @remarks NOTE(review): the disabled tags above appear copied from the SSE
 *          ldmxcsr variant (hence "@ opcpuid sse"); the VEX form presumably
 *          requires AVX like iemOp_VGrp15_vstmxcsr below - confirm when
 *          implementing.
 */
2243FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
2244//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
2245//{
2246// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2247// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
2248// return IEMOP_RAISE_INVALID_OPCODE();
2249//
2250// IEM_MC_BEGIN(2, 0);
2251// IEM_MC_ARG(uint8_t, iEffSeg, 0);
2252// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
2253// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
2254// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2255// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2256// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
2257// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
2258// IEM_MC_END();
2259// return VINF_SUCCESS;
2260//}
2261
2262
2263/**
2264 * @opmaps vexgrp15
2265 * @opcode !11/3
2266 * @oppfx none
2267 * @opcpuid avx
2268 * @opgroup og_avx_mxcsrsm
2269 * @opxcpttype 5
2270 * @optest mxcsr=0 -> op1=0
2271 * @optest mxcsr=0x2083 -> op1=0x2083
2272 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
2273 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
2274 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
2275 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
2276 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
2277 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
2278 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
2279 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
2280 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
2281 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
2282 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
2283 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
2284 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
2285 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
2286 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
2287 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
2288 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
2289 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
2290 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
2291 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
2292 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
2293 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
2294 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
2295 * -> value.xcpt=0x6
2296 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
2297 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
2298 * APMv4 rev 3.17 page 509.
2299 * @todo Test this instruction on AMD Ryzen.
2300 */
FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
{
    IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    /* #UD when the guest CPU profile doesn't have AVX. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    /* Decode the effective address first, then complete the VEX decoding
       checks: this encoding requires VEX.L=0 and VEX.vvvv=1111b. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_VEX_DECODING_L_ZERO_NO_VVV();
    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* The actual MXCSR store (and remaining CR0/CR4/XCR0 checks) is done in
       the C implementation worker. */
    IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
    IEM_MC_END();
    return VINF_SUCCESS;
}
2318
2319/* Opcode VEX.0F 0xae mem/4 - invalid. */
2320/* Opcode VEX.0F 0xae mem/5 - invalid. */
2321/* Opcode VEX.0F 0xae mem/6 - invalid. */
2322/* Opcode VEX.0F 0xae mem/7 - invalid. */
2323
2324/* Opcode VEX.0F 0xae 11b/0 - invalid. */
2325/* Opcode VEX.0F 0xae 11b/1 - invalid. */
2326/* Opcode VEX.0F 0xae 11b/2 - invalid. */
2327/* Opcode VEX.0F 0xae 11b/3 - invalid. */
2328/* Opcode VEX.0F 0xae 11b/4 - invalid. */
2329/* Opcode VEX.0F 0xae 11b/5 - invalid. */
2330/* Opcode VEX.0F 0xae 11b/6 - invalid. */
2331/* Opcode VEX.0F 0xae 11b/7 - invalid. */
2332
/**
 * Vex group 15 jump table for memory variant.
 *
 * Indexed by ModR/M.reg * 4 + SIMD prefix index; only /2 (vldmxcsr) and
 * /3 (vstmxcsr) with no SIMD prefix are valid.
 * (NOTE(review): despite the 'MemReg' suffix this table serves the memory
 *  forms only; all register forms of VEX group 15 are invalid.)
 */
IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
{ /* pfx:  none,                    066h,                 0f3h,                 0f2h */
    /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
    /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
};
AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
2348
2349
2350/** Opcode vex. 0xae. */
2351FNIEMOP_DEF(iemOp_VGrp15)
2352{
2353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2354 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2355 /* register, register */
2356 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
2357
2358 /* memory, register */
2359 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
2360 + pVCpu->iem.s.idxPrefix], bRm);
2361}
2362
2363
2364/* Opcode VEX.0F 0xaf - invalid. */
2365
2366/* Opcode VEX.0F 0xb0 - invalid. */
2367/* Opcode VEX.0F 0xb1 - invalid. */
/* Opcode VEX.0F 0xb2 - invalid. */
2370/* Opcode VEX.0F 0xb3 - invalid. */
2371/* Opcode VEX.0F 0xb4 - invalid. */
2372/* Opcode VEX.0F 0xb5 - invalid. */
2373/* Opcode VEX.0F 0xb6 - invalid. */
2374/* Opcode VEX.0F 0xb7 - invalid. */
2375/* Opcode VEX.0F 0xb8 - invalid. */
2376/* Opcode VEX.0F 0xb9 - invalid. */
2377/* Opcode VEX.0F 0xba - invalid. */
2378/* Opcode VEX.0F 0xbb - invalid. */
2379/* Opcode VEX.0F 0xbc - invalid. */
2380/* Opcode VEX.0F 0xbd - invalid. */
2381/* Opcode VEX.0F 0xbe - invalid. */
2382/* Opcode VEX.0F 0xbf - invalid. */
2383
2384/* Opcode VEX.0F 0xc0 - invalid. */
2385/* Opcode VEX.66.0F 0xc0 - invalid. */
2386/* Opcode VEX.F3.0F 0xc0 - invalid. */
2387/* Opcode VEX.F2.0F 0xc0 - invalid. */
2388
2389/* Opcode VEX.0F 0xc1 - invalid. */
2390/* Opcode VEX.66.0F 0xc1 - invalid. */
2391/* Opcode VEX.F3.0F 0xc1 - invalid. */
2392/* Opcode VEX.F2.0F 0xc1 - invalid. */
2393
/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);

/* Opcode VEX.0F 0xc3 - invalid */
/* Opcode VEX.66.0F 0xc3 - invalid */
/* Opcode VEX.F3.0F 0xc3 - invalid */
/* Opcode VEX.F2.0F 0xc3 - invalid */

/* Opcode VEX.0F 0xc4 - invalid */
/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
/* Opcode VEX.F3.0F 0xc4 - invalid */
/* Opcode VEX.F2.0F 0xc4 - invalid */

/* Opcode VEX.0F 0xc5 - invalid */
/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
/* Opcode VEX.F3.0F 0xc5 - invalid */
/* Opcode VEX.F2.0F 0xc5 - invalid */

/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
/* Opcode VEX.F3.0F 0xc6 - invalid */
/* Opcode VEX.F2.0F 0xc6 - invalid */
2426
2427/* Opcode VEX.0F 0xc7 - invalid */
2428/* Opcode VEX.66.0F 0xc7 - invalid */
2429/* Opcode VEX.F3.0F 0xc7 - invalid */
2430/* Opcode VEX.F2.0F 0xc7 - invalid */
2431
2432/* Opcode VEX.0F 0xc8 - invalid */
2433/* Opcode VEX.0F 0xc9 - invalid */
2434/* Opcode VEX.0F 0xca - invalid */
2435/* Opcode VEX.0F 0xcb - invalid */
2436/* Opcode VEX.0F 0xcc - invalid */
2437/* Opcode VEX.0F 0xcd - invalid */
2438/* Opcode VEX.0F 0xce - invalid */
2439/* Opcode VEX.0F 0xcf - invalid */
2440
2441
/* Opcode VEX.0F 0xd0 - invalid */
/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
/* Opcode VEX.F3.0F 0xd0 - invalid */
/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);

/* Opcode VEX.0F 0xd1 - invalid */
/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx (stub name abbreviates Wx as W) */
FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xd1 - invalid */
/* Opcode VEX.F2.0F 0xd1 - invalid */

/* Opcode VEX.0F 0xd2 - invalid */
/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xd2 - invalid */
/* Opcode VEX.F2.0F 0xd2 - invalid */

/* Opcode VEX.0F 0xd3 - invalid */
/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xd3 - invalid */
/* Opcode VEX.F2.0F 0xd3 - invalid */

/* Opcode VEX.0F 0xd4 - invalid */
/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx (stub name abbreviates Wx as W) */
FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xd4 - invalid */
/* Opcode VEX.F2.0F 0xd4 - invalid */

/* Opcode VEX.0F 0xd5 - invalid */
/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xd5 - invalid */
/* Opcode VEX.F2.0F 0xd5 - invalid */
2478
2479/* Opcode VEX.0F 0xd6 - invalid */
2480
2481/**
2482 * @ opcode 0xd6
2483 * @ oppfx 0x66
2484 * @ opcpuid sse2
2485 * @ opgroup og_sse2_pcksclr_datamove
2486 * @ opxcpttype none
2487 * @ optest op1=-1 op2=2 -> op1=2
2488 * @ optest op1=0 op2=-42 -> op1=-42
2489 */
2490FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
2491//FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
2492//{
2493// IEMOP_MNEMONIC2(MR, VMOVQ, vmovq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
2494// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2495// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2496// {
2497// /*
2498// * Register, register.
2499// */
2500// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2501// IEM_MC_BEGIN(0, 2);
2502// IEM_MC_LOCAL(uint64_t, uSrc);
2503//
2504// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2505// IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2506//
2507// IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2508// IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
2509//
2510// IEM_MC_ADVANCE_RIP();
2511// IEM_MC_END();
2512// }
2513// else
2514// {
2515// /*
2516// * Memory, register.
2517// */
2518// IEM_MC_BEGIN(0, 2);
2519// IEM_MC_LOCAL(uint64_t, uSrc);
2520// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2521//
2522// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2523// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2524// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2525// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2526//
2527// IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2528// IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2529//
2530// IEM_MC_ADVANCE_RIP();
2531// IEM_MC_END();
2532// }
2533// return VINF_SUCCESS;
2534//}
2535
2536/* Opcode VEX.F3.0F 0xd6 - invalid */
2537/* Opcode VEX.F2.0F 0xd6 - invalid */
2538
2539
2540/* Opcode VEX.0F 0xd7 - invalid */
2541
2542/** Opcode VEX.66.0F 0xd7 - */
2543FNIEMOP_STUB(iemOp_vpmovmskb_Gd_Ux);
2544//FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
2545//{
2546// /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
2547// /** @todo testcase: Check that the instruction implicitly clears the high
2548// * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
2549// * and opcode modifications are made to work with the whole width (not
2550// * just 128). */
2551// IEMOP_MNEMONIC(vpmovmskb_Gd_Nq, "vpmovmskb Gd, Ux");
2552// /* Docs says register only. */
2553// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2554// if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
2555// {
2556// IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
2557// IEM_MC_BEGIN(2, 0);
2558// IEM_MC_ARG(uint64_t *, pDst, 0);
2559// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
2560// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2561// IEM_MC_PREPARE_SSE_USAGE();
2562// IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2563// IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2564// IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
2565// IEM_MC_ADVANCE_RIP();
2566// IEM_MC_END();
2567// return VINF_SUCCESS;
2568// }
2569// return IEMOP_RAISE_INVALID_OPCODE();
2570//}
2571
2572/* Opcode VEX.F3.0F 0xd7 - invalid */
2573/* Opcode VEX.F2.0F 0xd7 - invalid */
2574
2575
/* Opcode VEX.0F 0xd8 - invalid */
/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx (stub name abbreviates Wx as W) */
FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xd8 - invalid */
/* Opcode VEX.F2.0F 0xd8 - invalid */

/* Opcode VEX.0F 0xd9 - invalid */
/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xd9 - invalid */
/* Opcode VEX.F2.0F 0xd9 - invalid */

/* Opcode VEX.0F 0xda - invalid */
/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xda - invalid */
/* Opcode VEX.F2.0F 0xda - invalid */

/* Opcode VEX.0F 0xdb - invalid */
/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx (stub name abbreviates Wx as W) */
FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xdb - invalid */
/* Opcode VEX.F2.0F 0xdb - invalid */

/* Opcode VEX.0F 0xdc - invalid */
/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xdc - invalid */
/* Opcode VEX.F2.0F 0xdc - invalid */

/* Opcode VEX.0F 0xdd - invalid */
/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xdd - invalid */
/* Opcode VEX.F2.0F 0xdd - invalid */

/* Opcode VEX.0F 0xde - invalid */
/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx (stub name abbreviates Wx as W) */
FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xde - invalid */
/* Opcode VEX.F2.0F 0xde - invalid */

/* Opcode VEX.0F 0xdf - invalid */
/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xdf - invalid */
/* Opcode VEX.F2.0F 0xdf - invalid */
2623
/* Opcode VEX.0F 0xe0 - invalid */
/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe0 - invalid */
/* Opcode VEX.F2.0F 0xe0 - invalid */

/* Opcode VEX.0F 0xe1 - invalid */
/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx (stub name abbreviates Wx as W) */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xe1 - invalid */
/* Opcode VEX.F2.0F 0xe1 - invalid */

/* Opcode VEX.0F 0xe2 - invalid */
/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe2 - invalid */
/* Opcode VEX.F2.0F 0xe2 - invalid */

/* Opcode VEX.0F 0xe3 - invalid */
/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe3 - invalid */
/* Opcode VEX.F2.0F 0xe3 - invalid */

/* Opcode VEX.0F 0xe4 - invalid */
/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx (stub name abbreviates Wx as W) */
FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
/* Opcode VEX.F3.0F 0xe4 - invalid */
/* Opcode VEX.F2.0F 0xe4 - invalid */

/* Opcode VEX.0F 0xe5 - invalid */
/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xe5 - invalid */
/* Opcode VEX.F2.0F 0xe5 - invalid */

/* Opcode VEX.0F 0xe6 - invalid */
/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
2667
2668
2669/* Opcode VEX.0F 0xe7 - invalid */
2670
2671/** Opcode VEX.66.0F 0xe7 - vmovntdq Mx, Vx */
2672FNIEMOP_STUB(iemOp_vmovntdq_Mx_Vx);
2673//FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
2674//{
2675// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2676// if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2677// {
2678// /* Register, memory. */
2679// IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
2680// IEM_MC_BEGIN(0, 2);
2681// IEM_MC_LOCAL(RTUINT128U, uSrc);
2682// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2683//
2684// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2685// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2686// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2687// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2688//
2689// IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2690// IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2691//
2692// IEM_MC_ADVANCE_RIP();
2693// IEM_MC_END();
2694// return VINF_SUCCESS;
2695// }
2696//
2697// /* The register, register encoding is invalid. */
2698// return IEMOP_RAISE_INVALID_OPCODE();
2699//}
2700
2701/* Opcode VEX.F3.0F 0xe7 - invalid */
2702/* Opcode VEX.F2.0F 0xe7 - invalid */
2703
2704
2705/* Opcode VEX.0F 0xe8 - invalid */
2706/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
2707FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
2708/* Opcode VEX.F3.0F 0xe8 - invalid */
2709/* Opcode VEX.F2.0F 0xe8 - invalid */
2710
2711/* Opcode VEX.0F 0xe9 - invalid */
2712/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
2713FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
2714/* Opcode VEX.F3.0F 0xe9 - invalid */
2715/* Opcode VEX.F2.0F 0xe9 - invalid */
2716
2717/* Opcode VEX.0F 0xea - invalid */
2718/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
2719FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
2720/* Opcode VEX.F3.0F 0xea - invalid */
2721/* Opcode VEX.F2.0F 0xea - invalid */
2722
2723/* Opcode VEX.0F 0xeb - invalid */
2724/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, W */
2725FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
2726/* Opcode VEX.F3.0F 0xeb - invalid */
2727/* Opcode VEX.F2.0F 0xeb - invalid */
2728
2729/* Opcode VEX.0F 0xec - invalid */
2730/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
2731FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
2732/* Opcode VEX.F3.0F 0xec - invalid */
2733/* Opcode VEX.F2.0F 0xec - invalid */
2734
2735/* Opcode VEX.0F 0xed - invalid */
2736/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
2737FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
2738/* Opcode VEX.F3.0F 0xed - invalid */
2739/* Opcode VEX.F2.0F 0xed - invalid */
2740
2741/* Opcode VEX.0F 0xee - invalid */
2742/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, W */
2743FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
2744/* Opcode VEX.F3.0F 0xee - invalid */
2745/* Opcode VEX.F2.0F 0xee - invalid */
2746
2747
2748/* Opcode VEX.0F 0xef - invalid */
2749
2750/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
2751FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
2752{
2753 IEMOP_MNEMONIC(vpxor, "vpxor");
2754 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
2755}
2756
2757/* Opcode VEX.F3.0F 0xef - invalid */
2758/* Opcode VEX.F2.0F 0xef - invalid */
2759
2760/* Opcode VEX.0F 0xf0 - invalid */
2761/* Opcode VEX.66.0F 0xf0 - invalid */
2762/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
2763FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
2764
2765/* Opcode VEX.0F 0xf1 - invalid */
2766/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
2767FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
2768/* Opcode VEX.F2.0F 0xf1 - invalid */
2769
2770/* Opcode VEX.0F 0xf2 - invalid */
2771/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
2772FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
2773/* Opcode VEX.F2.0F 0xf2 - invalid */
2774
2775/* Opcode VEX.0F 0xf3 - invalid */
2776/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
2777FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
2778/* Opcode VEX.F2.0F 0xf3 - invalid */
2779
2780/* Opcode VEX.0F 0xf4 - invalid */
2781/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
2782FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
2783/* Opcode VEX.F2.0F 0xf4 - invalid */
2784
2785/* Opcode VEX.0F 0xf5 - invalid */
2786/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
2787FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
2788/* Opcode VEX.F2.0F 0xf5 - invalid */
2789
2790/* Opcode VEX.0F 0xf6 - invalid */
2791/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
2792FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
2793/* Opcode VEX.F2.0F 0xf6 - invalid */
2794
2795/* Opcode VEX.0F 0xf7 - invalid */
2796/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
2797FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
2798/* Opcode VEX.F2.0F 0xf7 - invalid */
2799
2800/* Opcode VEX.0F 0xf8 - invalid */
2801/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, W */
2802FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
2803/* Opcode VEX.F2.0F 0xf8 - invalid */
2804
2805/* Opcode VEX.0F 0xf9 - invalid */
2806/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
2807FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
2808/* Opcode VEX.F2.0F 0xf9 - invalid */
2809
2810/* Opcode VEX.0F 0xfa - invalid */
2811/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
2812FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
2813/* Opcode VEX.F2.0F 0xfa - invalid */
2814
2815/* Opcode VEX.0F 0xfb - invalid */
2816/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, W */
2817FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
2818/* Opcode VEX.F2.0F 0xfb - invalid */
2819
2820/* Opcode VEX.0F 0xfc - invalid */
2821/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
2822FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
2823/* Opcode VEX.F2.0F 0xfc - invalid */
2824
2825/* Opcode VEX.0F 0xfd - invalid */
2826/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
2827FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
2828/* Opcode VEX.F2.0F 0xfd - invalid */
2829
2830/* Opcode VEX.0F 0xfe - invalid */
2831/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, W */
2832FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
2833/* Opcode VEX.F2.0F 0xfe - invalid */
2834
2835
2836/** Opcode **** 0x0f 0xff - UD0 */
2837FNIEMOP_DEF(iemOp_vud0)
2838{
2839 IEMOP_MNEMONIC(vud0, "vud0");
2840 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
2841 {
2842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
2843#ifndef TST_IEM_CHECK_MC
2844 RTGCPTR GCPtrEff;
2845 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
2846 if (rcStrict != VINF_SUCCESS)
2847 return rcStrict;
2848#endif
2849 IEMOP_HLP_DONE_DECODING();
2850 }
2851 return IEMOP_RAISE_INVALID_OPCODE();
2852}
2853
2854
2855
2856/**
2857 * VEX opcode map \#1.
2858 *
2859 * @sa g_apfnTwoByteMap
2860 */
2861IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
2862{
2863 /* no prefix, 066h prefix f3h prefix, f2h prefix */
2864 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
2865 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
2866 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
2867 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
2868 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
2869 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
2870 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
2871 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
2872 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
2873 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
2874 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
2875 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
2876 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
2877 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
2878 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
2879 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
2880
2881 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
2882 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
2883 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
2884 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2885 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2886 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2887 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
2888 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2889 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
2890 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
2891 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
2892 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
2893 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
2894 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
2895 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
2896 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
2897
2898 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
2899 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
2900 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
2901 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
2902 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
2903 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
2904 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
2905 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
2906 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2907 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2908 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
2909 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2910 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
2911 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
2912 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2913 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2914
2915 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
2916 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
2917 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
2918 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
2919 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
2920 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
2921 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
2922 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
2923 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
2924 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
2925 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
2926 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
2927 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
2928 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
2929 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
2930 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
2931
2932 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
2933 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
2934 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
2935 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
2936 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
2937 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
2938 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
2939 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
2940 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
2941 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
2942 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
2943 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
2944 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
2945 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
2946 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
2947 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
2948
2949 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2950 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
2951 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
2952 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
2953 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2954 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2955 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2956 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2957 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
2958 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
2959 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
2960 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
2961 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
2962 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
2963 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
2964 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
2965
2966 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2967 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2968 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2969 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2970 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2971 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2972 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2973 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2974 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2975 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2976 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2977 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2978 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2979 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2980 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2981 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
2982
2983 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
2984 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2985 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2986 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2987 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2988 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2989 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2990 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
2991 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
2992 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
2993 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
2994 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
2995 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
2996 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
2997 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
2998 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
2999
3000 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
3001 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
3002 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
3003 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
3004 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
3005 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
3006 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
3007 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
3008 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
3009 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
3010 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
3011 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
3012 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
3013 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
3014 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
3015 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
3016
3017 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
3018 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
3019 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
3020 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
3021 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
3022 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
3023 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
3024 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
3025 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
3026 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
3027 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
3028 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
3029 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
3030 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
3031 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
3032 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
3033
3034 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
3035 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
3036 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
3037 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
3038 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
3039 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
3040 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
3041 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
3042 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
3043 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
3044 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
3045 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
3046 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
3047 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
3048 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
3049 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
3050
3051 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
3052 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
3053 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
3054 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
3055 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
3056 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
3057 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
3058 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
3059 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
3060 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
3061 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
3062 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
3063 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
3064 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
3065 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
3066 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
3067
3068 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
3069 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
3070 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
3071 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
3072 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
3073 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
3074 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
3075 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
3076 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
3077 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
3078 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
3079 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
3080 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
3081 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
3082 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
3083 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
3084
3085 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
3086 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3087 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3088 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3089 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3090 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3091 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3092 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3093 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3094 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3095 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3096 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3097 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3098 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3099 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3100 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3101
3102 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3103 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3104 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3105 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3106 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3107 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3108 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
3109 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3110 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3111 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3112 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3113 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3114 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3115 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3116 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3117 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3118
3119 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
3120 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3121 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3122 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3123 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3124 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3125 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3126 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3127 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3128 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3129 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3130 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3131 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3132 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3133 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
3134 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
3135};
3136AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
3137/** @} */
3138
3139
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette