VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65699

Last change on this file since 65699 was 65650, checked in by vboxsync, 8 years ago

gcc 7: fall thru

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 664.8 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 65650 2017-02-07 11:46:04Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
30/**
31 * Common worker for instructions like ADD, AND, OR, ++ with a byte
32 * memory/register as the destination.
33 *
34 * @param pImpl Pointer to the instruction implementation (assembly).
35 */
36FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
37{
38 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
39
40 /*
41 * If rm is denoting a register, no more instruction bytes.
42 */
43 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
44 {
45 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
46
47 IEM_MC_BEGIN(3, 0);
48 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
49 IEM_MC_ARG(uint8_t, u8Src, 1);
50 IEM_MC_ARG(uint32_t *, pEFlags, 2);
51
52 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
53 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
54 IEM_MC_REF_EFLAGS(pEFlags);
55 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
56
57 IEM_MC_ADVANCE_RIP();
58 IEM_MC_END();
59 }
60 else
61 {
62 /*
63 * We're accessing memory.
64 * Note! We're putting the eflags on the stack here so we can commit them
65 * after the memory.
66 */
67 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
68 IEM_MC_BEGIN(3, 2);
69 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
70 IEM_MC_ARG(uint8_t, u8Src, 1);
71 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
72 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
73
74 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
75 if (!pImpl->pfnLockedU8)
76 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
77 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
78 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
79 IEM_MC_FETCH_EFLAGS(EFlags);
80 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
81 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
82 else
83 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
84
85 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
86 IEM_MC_COMMIT_EFLAGS(EFlags);
87 IEM_MC_ADVANCE_RIP();
88 IEM_MC_END();
89 }
90 return VINF_SUCCESS;
91}
92
93
94/**
95 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
96 * memory/register as the destination.
97 *
98 * @param pImpl Pointer to the instruction implementation (assembly).
99 */
100FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
101{
102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
103
104 /*
105 * If rm is denoting a register, no more instruction bytes.
106 */
107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
108 {
109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
110
111 switch (pVCpu->iem.s.enmEffOpSize)
112 {
113 case IEMMODE_16BIT:
114 IEM_MC_BEGIN(3, 0);
115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
116 IEM_MC_ARG(uint16_t, u16Src, 1);
117 IEM_MC_ARG(uint32_t *, pEFlags, 2);
118
119 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
120 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
121 IEM_MC_REF_EFLAGS(pEFlags);
122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
123
124 IEM_MC_ADVANCE_RIP();
125 IEM_MC_END();
126 break;
127
128 case IEMMODE_32BIT:
129 IEM_MC_BEGIN(3, 0);
130 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
131 IEM_MC_ARG(uint32_t, u32Src, 1);
132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
133
134 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
135 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
136 IEM_MC_REF_EFLAGS(pEFlags);
137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
138
139 if (pImpl != &g_iemAImpl_test)
140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
141 IEM_MC_ADVANCE_RIP();
142 IEM_MC_END();
143 break;
144
145 case IEMMODE_64BIT:
146 IEM_MC_BEGIN(3, 0);
147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
148 IEM_MC_ARG(uint64_t, u64Src, 1);
149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
150
151 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
152 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
153 IEM_MC_REF_EFLAGS(pEFlags);
154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
155
156 IEM_MC_ADVANCE_RIP();
157 IEM_MC_END();
158 break;
159 }
160 }
161 else
162 {
163 /*
164 * We're accessing memory.
165 * Note! We're putting the eflags on the stack here so we can commit them
166 * after the memory.
167 */
168 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
169 switch (pVCpu->iem.s.enmEffOpSize)
170 {
171 case IEMMODE_16BIT:
172 IEM_MC_BEGIN(3, 2);
173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
174 IEM_MC_ARG(uint16_t, u16Src, 1);
175 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
177
178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
179 if (!pImpl->pfnLockedU16)
180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
181 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
182 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
183 IEM_MC_FETCH_EFLAGS(EFlags);
184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
186 else
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
188
189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
190 IEM_MC_COMMIT_EFLAGS(EFlags);
191 IEM_MC_ADVANCE_RIP();
192 IEM_MC_END();
193 break;
194
195 case IEMMODE_32BIT:
196 IEM_MC_BEGIN(3, 2);
197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
198 IEM_MC_ARG(uint32_t, u32Src, 1);
199 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
201
202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
203 if (!pImpl->pfnLockedU32)
204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
205 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
206 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
207 IEM_MC_FETCH_EFLAGS(EFlags);
208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
210 else
211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
212
213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
214 IEM_MC_COMMIT_EFLAGS(EFlags);
215 IEM_MC_ADVANCE_RIP();
216 IEM_MC_END();
217 break;
218
219 case IEMMODE_64BIT:
220 IEM_MC_BEGIN(3, 2);
221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
222 IEM_MC_ARG(uint64_t, u64Src, 1);
223 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
225
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
227 if (!pImpl->pfnLockedU64)
228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
229 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
231 IEM_MC_FETCH_EFLAGS(EFlags);
232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
233 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
234 else
235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
236
237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
238 IEM_MC_COMMIT_EFLAGS(EFlags);
239 IEM_MC_ADVANCE_RIP();
240 IEM_MC_END();
241 break;
242 }
243 }
244 return VINF_SUCCESS;
245}
246
247
248/**
249 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
250 * the destination.
251 *
252 * @param pImpl Pointer to the instruction implementation (assembly).
253 */
254FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
255{
256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
257
258 /*
259 * If rm is denoting a register, no more instruction bytes.
260 */
261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
262 {
263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
264 IEM_MC_BEGIN(3, 0);
265 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
266 IEM_MC_ARG(uint8_t, u8Src, 1);
267 IEM_MC_ARG(uint32_t *, pEFlags, 2);
268
269 IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
270 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
271 IEM_MC_REF_EFLAGS(pEFlags);
272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
273
274 IEM_MC_ADVANCE_RIP();
275 IEM_MC_END();
276 }
277 else
278 {
279 /*
280 * We're accessing memory.
281 */
282 IEM_MC_BEGIN(3, 1);
283 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
284 IEM_MC_ARG(uint8_t, u8Src, 1);
285 IEM_MC_ARG(uint32_t *, pEFlags, 2);
286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
287
288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
290 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
291 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
292 IEM_MC_REF_EFLAGS(pEFlags);
293 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
294
295 IEM_MC_ADVANCE_RIP();
296 IEM_MC_END();
297 }
298 return VINF_SUCCESS;
299}
300
301
302/**
303 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
304 * register as the destination.
305 *
306 * @param pImpl Pointer to the instruction implementation (assembly).
307 */
308FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
309{
310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
311
312 /*
313 * If rm is denoting a register, no more instruction bytes.
314 */
315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
316 {
317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
318 switch (pVCpu->iem.s.enmEffOpSize)
319 {
320 case IEMMODE_16BIT:
321 IEM_MC_BEGIN(3, 0);
322 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
323 IEM_MC_ARG(uint16_t, u16Src, 1);
324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
325
326 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
327 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
328 IEM_MC_REF_EFLAGS(pEFlags);
329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
330
331 IEM_MC_ADVANCE_RIP();
332 IEM_MC_END();
333 break;
334
335 case IEMMODE_32BIT:
336 IEM_MC_BEGIN(3, 0);
337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
338 IEM_MC_ARG(uint32_t, u32Src, 1);
339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
340
341 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
342 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
343 IEM_MC_REF_EFLAGS(pEFlags);
344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
345
346 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
347 IEM_MC_ADVANCE_RIP();
348 IEM_MC_END();
349 break;
350
351 case IEMMODE_64BIT:
352 IEM_MC_BEGIN(3, 0);
353 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
354 IEM_MC_ARG(uint64_t, u64Src, 1);
355 IEM_MC_ARG(uint32_t *, pEFlags, 2);
356
357 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
358 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
359 IEM_MC_REF_EFLAGS(pEFlags);
360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
361
362 IEM_MC_ADVANCE_RIP();
363 IEM_MC_END();
364 break;
365 }
366 }
367 else
368 {
369 /*
370 * We're accessing memory.
371 */
372 switch (pVCpu->iem.s.enmEffOpSize)
373 {
374 case IEMMODE_16BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
377 IEM_MC_ARG(uint16_t, u16Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
383 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
384 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
385 IEM_MC_REF_EFLAGS(pEFlags);
386 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
387
388 IEM_MC_ADVANCE_RIP();
389 IEM_MC_END();
390 break;
391
392 case IEMMODE_32BIT:
393 IEM_MC_BEGIN(3, 1);
394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
395 IEM_MC_ARG(uint32_t, u32Src, 1);
396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
398
399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
401 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
402 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
403 IEM_MC_REF_EFLAGS(pEFlags);
404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
405
406 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 break;
410
411 case IEMMODE_64BIT:
412 IEM_MC_BEGIN(3, 1);
413 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
414 IEM_MC_ARG(uint64_t, u64Src, 1);
415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
417
418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
420 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
421 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
422 IEM_MC_REF_EFLAGS(pEFlags);
423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
424
425 IEM_MC_ADVANCE_RIP();
426 IEM_MC_END();
427 break;
428 }
429 }
430 return VINF_SUCCESS;
431}
432
433
434/**
435 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
436 * a byte immediate.
437 *
438 * @param pImpl Pointer to the instruction implementation (assembly).
439 */
440FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
441{
442 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
444
445 IEM_MC_BEGIN(3, 0);
446 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
447 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
448 IEM_MC_ARG(uint32_t *, pEFlags, 2);
449
450 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
451 IEM_MC_REF_EFLAGS(pEFlags);
452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
453
454 IEM_MC_ADVANCE_RIP();
455 IEM_MC_END();
456 return VINF_SUCCESS;
457}
458
459
460/**
461 * Common worker for instructions like ADD, AND, OR, ++ with working on
462 * AX/EAX/RAX with a word/dword immediate.
463 *
464 * @param pImpl Pointer to the instruction implementation (assembly).
465 */
466FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
467{
468 switch (pVCpu->iem.s.enmEffOpSize)
469 {
470 case IEMMODE_16BIT:
471 {
472 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
474
475 IEM_MC_BEGIN(3, 0);
476 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
477 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
478 IEM_MC_ARG(uint32_t *, pEFlags, 2);
479
480 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
481 IEM_MC_REF_EFLAGS(pEFlags);
482 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
483
484 IEM_MC_ADVANCE_RIP();
485 IEM_MC_END();
486 return VINF_SUCCESS;
487 }
488
489 case IEMMODE_32BIT:
490 {
491 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
493
494 IEM_MC_BEGIN(3, 0);
495 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
496 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
497 IEM_MC_ARG(uint32_t *, pEFlags, 2);
498
499 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
500 IEM_MC_REF_EFLAGS(pEFlags);
501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
502
503 if (pImpl != &g_iemAImpl_test)
504 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
505 IEM_MC_ADVANCE_RIP();
506 IEM_MC_END();
507 return VINF_SUCCESS;
508 }
509
510 case IEMMODE_64BIT:
511 {
512 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
514
515 IEM_MC_BEGIN(3, 0);
516 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
517 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
519
520 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
521 IEM_MC_REF_EFLAGS(pEFlags);
522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
523
524 IEM_MC_ADVANCE_RIP();
525 IEM_MC_END();
526 return VINF_SUCCESS;
527 }
528
529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
530 }
531}
532
533
534/** Opcodes 0xf1, 0xd6. */
535FNIEMOP_DEF(iemOp_Invalid)
536{
537 IEMOP_MNEMONIC(Invalid, "Invalid");
538 return IEMOP_RAISE_INVALID_OPCODE();
539}
540
541
542/** Invalid with RM byte . */
543FNIEMOPRM_DEF(iemOp_InvalidWithRM)
544{
545 RT_NOREF_PV(bRm);
546 IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
547 return IEMOP_RAISE_INVALID_OPCODE();
548}
549
550
551/** Invalid opcode where intel requires Mod R/M sequence. */
552FNIEMOP_DEF(iemOp_InvalidNeedRM)
553{
554 IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
555 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
556 {
557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
558#ifndef TST_IEM_CHECK_MC
559 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
560 {
561 RTGCPTR GCPtrEff;
562 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
563 if (rcStrict != VINF_SUCCESS)
564 return rcStrict;
565 }
566#endif
567 IEMOP_HLP_DONE_DECODING();
568 }
569 return IEMOP_RAISE_INVALID_OPCODE();
570}
571
572
573/** Invalid opcode where intel requires Mod R/M sequence and 8-byte
574 * immediate. */
575FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
576{
577 IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
578 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
579 {
580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
581#ifndef TST_IEM_CHECK_MC
582 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
583 {
584 RTGCPTR GCPtrEff;
585 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
586 if (rcStrict != VINF_SUCCESS)
587 return rcStrict;
588 }
589#endif
590 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
591 IEMOP_HLP_DONE_DECODING();
592 }
593 return IEMOP_RAISE_INVALID_OPCODE();
594}
595
596
597/** Invalid opcode where intel requires a 3rd escape byte and a Mod R/M
598 * sequence. */
599FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
600{
601 IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
602 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
603 {
604 uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
606#ifndef TST_IEM_CHECK_MC
607 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
608 {
609 RTGCPTR GCPtrEff;
610 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
611 if (rcStrict != VINF_SUCCESS)
612 return rcStrict;
613 }
614#endif
615 IEMOP_HLP_DONE_DECODING();
616 }
617 return IEMOP_RAISE_INVALID_OPCODE();
618}
619
620
621/** Invalid opcode where intel requires a 3rd escape byte, Mod R/M sequence, and
622 * a 8-byte immediate. */
623FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
624{
625 IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
626 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
627 {
628 uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
630#ifndef TST_IEM_CHECK_MC
631 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
632 {
633 RTGCPTR GCPtrEff;
634 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
635 if (rcStrict != VINF_SUCCESS)
636 return rcStrict;
637 }
638#endif
639 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
640 IEMOP_HLP_DONE_DECODING();
641 }
642 return IEMOP_RAISE_INVALID_OPCODE();
643}
644
645
646
647/** @name ..... opcodes.
648 *
649 * @{
650 */
651
652/** @} */
653
654
655/** @name Two byte opcodes (first byte 0x0f).
656 *
657 * @{
658 */
659
660/** Opcode 0x0f 0x00 /0. */
661FNIEMOPRM_DEF(iemOp_Grp6_sldt)
662{
663 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
664 IEMOP_HLP_MIN_286();
665 IEMOP_HLP_NO_REAL_OR_V86_MODE();
666
667 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
668 {
669 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
670 switch (pVCpu->iem.s.enmEffOpSize)
671 {
672 case IEMMODE_16BIT:
673 IEM_MC_BEGIN(0, 1);
674 IEM_MC_LOCAL(uint16_t, u16Ldtr);
675 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
676 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
677 IEM_MC_ADVANCE_RIP();
678 IEM_MC_END();
679 break;
680
681 case IEMMODE_32BIT:
682 IEM_MC_BEGIN(0, 1);
683 IEM_MC_LOCAL(uint32_t, u32Ldtr);
684 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
685 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
686 IEM_MC_ADVANCE_RIP();
687 IEM_MC_END();
688 break;
689
690 case IEMMODE_64BIT:
691 IEM_MC_BEGIN(0, 1);
692 IEM_MC_LOCAL(uint64_t, u64Ldtr);
693 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
694 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
695 IEM_MC_ADVANCE_RIP();
696 IEM_MC_END();
697 break;
698
699 IEM_NOT_REACHED_DEFAULT_CASE_RET();
700 }
701 }
702 else
703 {
704 IEM_MC_BEGIN(0, 2);
705 IEM_MC_LOCAL(uint16_t, u16Ldtr);
706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
708 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
709 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
710 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
711 IEM_MC_ADVANCE_RIP();
712 IEM_MC_END();
713 }
714 return VINF_SUCCESS;
715}
716
717
718/** Opcode 0x0f 0x00 /1. */
719FNIEMOPRM_DEF(iemOp_Grp6_str)
720{
721 IEMOP_MNEMONIC(str, "str Rv/Mw");
722 IEMOP_HLP_MIN_286();
723 IEMOP_HLP_NO_REAL_OR_V86_MODE();
724
725 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
726 {
727 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
728 switch (pVCpu->iem.s.enmEffOpSize)
729 {
730 case IEMMODE_16BIT:
731 IEM_MC_BEGIN(0, 1);
732 IEM_MC_LOCAL(uint16_t, u16Tr);
733 IEM_MC_FETCH_TR_U16(u16Tr);
734 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
735 IEM_MC_ADVANCE_RIP();
736 IEM_MC_END();
737 break;
738
739 case IEMMODE_32BIT:
740 IEM_MC_BEGIN(0, 1);
741 IEM_MC_LOCAL(uint32_t, u32Tr);
742 IEM_MC_FETCH_TR_U32(u32Tr);
743 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
744 IEM_MC_ADVANCE_RIP();
745 IEM_MC_END();
746 break;
747
748 case IEMMODE_64BIT:
749 IEM_MC_BEGIN(0, 1);
750 IEM_MC_LOCAL(uint64_t, u64Tr);
751 IEM_MC_FETCH_TR_U64(u64Tr);
752 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
753 IEM_MC_ADVANCE_RIP();
754 IEM_MC_END();
755 break;
756
757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
758 }
759 }
760 else
761 {
762 IEM_MC_BEGIN(0, 2);
763 IEM_MC_LOCAL(uint16_t, u16Tr);
764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
766 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
767 IEM_MC_FETCH_TR_U16(u16Tr);
768 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
769 IEM_MC_ADVANCE_RIP();
770 IEM_MC_END();
771 }
772 return VINF_SUCCESS;
773}
774
775
776/** Opcode 0x0f 0x00 /2. */
777FNIEMOPRM_DEF(iemOp_Grp6_lldt)
778{
779 IEMOP_MNEMONIC(lldt, "lldt Ew");
780 IEMOP_HLP_MIN_286();
781 IEMOP_HLP_NO_REAL_OR_V86_MODE();
782
783 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
784 {
785 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
786 IEM_MC_BEGIN(1, 0);
787 IEM_MC_ARG(uint16_t, u16Sel, 0);
788 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
789 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
790 IEM_MC_END();
791 }
792 else
793 {
794 IEM_MC_BEGIN(1, 1);
795 IEM_MC_ARG(uint16_t, u16Sel, 0);
796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
798 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
799 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
800 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
801 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
802 IEM_MC_END();
803 }
804 return VINF_SUCCESS;
805}
806
807
808/** Opcode 0x0f 0x00 /3. */
809FNIEMOPRM_DEF(iemOp_Grp6_ltr)
810{
811 IEMOP_MNEMONIC(ltr, "ltr Ew");
812 IEMOP_HLP_MIN_286();
813 IEMOP_HLP_NO_REAL_OR_V86_MODE();
814
815 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
816 {
817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
818 IEM_MC_BEGIN(1, 0);
819 IEM_MC_ARG(uint16_t, u16Sel, 0);
820 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
821 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
822 IEM_MC_END();
823 }
824 else
825 {
826 IEM_MC_BEGIN(1, 1);
827 IEM_MC_ARG(uint16_t, u16Sel, 0);
828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
831 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test ordre */
832 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
833 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
834 IEM_MC_END();
835 }
836 return VINF_SUCCESS;
837}
838
839
840/** Opcode 0x0f 0x00 /3. */
841FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
842{
843 IEMOP_HLP_MIN_286();
844 IEMOP_HLP_NO_REAL_OR_V86_MODE();
845
846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
847 {
848 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
849 IEM_MC_BEGIN(2, 0);
850 IEM_MC_ARG(uint16_t, u16Sel, 0);
851 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
852 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
853 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
854 IEM_MC_END();
855 }
856 else
857 {
858 IEM_MC_BEGIN(2, 1);
859 IEM_MC_ARG(uint16_t, u16Sel, 0);
860 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
863 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
864 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
865 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
866 IEM_MC_END();
867 }
868 return VINF_SUCCESS;
869}
870
871
872/** Opcode 0x0f 0x00 /4. */
873FNIEMOPRM_DEF(iemOp_Grp6_verr)
874{
875 IEMOP_MNEMONIC(verr, "verr Ew");
876 IEMOP_HLP_MIN_286();
877 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
878}
879
880
881/** Opcode 0x0f 0x00 /5. */
882FNIEMOPRM_DEF(iemOp_Grp6_verw)
883{
884 IEMOP_MNEMONIC(verw, "verw Ew");
885 IEMOP_HLP_MIN_286();
886 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
887}
888
889
890/**
891 * Group 6 jump table.
892 */
893IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
894{
895 iemOp_Grp6_sldt,
896 iemOp_Grp6_str,
897 iemOp_Grp6_lldt,
898 iemOp_Grp6_ltr,
899 iemOp_Grp6_verr,
900 iemOp_Grp6_verw,
901 iemOp_InvalidWithRM,
902 iemOp_InvalidWithRM
903};
904
905/** Opcode 0x0f 0x00. */
906FNIEMOP_DEF(iemOp_Grp6)
907{
908 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
909 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
910}
911
912
913/** Opcode 0x0f 0x01 /0. */
914FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
915{
916 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
917 IEMOP_HLP_MIN_286();
918 IEMOP_HLP_64BIT_OP_SIZE();
919 IEM_MC_BEGIN(2, 1);
920 IEM_MC_ARG(uint8_t, iEffSeg, 0);
921 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
924 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
925 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
926 IEM_MC_END();
927 return VINF_SUCCESS;
928}
929
930
931/** Opcode 0x0f 0x01 /0. */
932FNIEMOP_DEF(iemOp_Grp7_vmcall)
933{
934 IEMOP_BITCH_ABOUT_STUB();
935 return IEMOP_RAISE_INVALID_OPCODE();
936}
937
938
939/** Opcode 0x0f 0x01 /0. */
940FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
941{
942 IEMOP_BITCH_ABOUT_STUB();
943 return IEMOP_RAISE_INVALID_OPCODE();
944}
945
946
947/** Opcode 0x0f 0x01 /0. */
948FNIEMOP_DEF(iemOp_Grp7_vmresume)
949{
950 IEMOP_BITCH_ABOUT_STUB();
951 return IEMOP_RAISE_INVALID_OPCODE();
952}
953
954
955/** Opcode 0x0f 0x01 /0. */
956FNIEMOP_DEF(iemOp_Grp7_vmxoff)
957{
958 IEMOP_BITCH_ABOUT_STUB();
959 return IEMOP_RAISE_INVALID_OPCODE();
960}
961
962
963/** Opcode 0x0f 0x01 /1. */
964FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
965{
966 IEMOP_MNEMONIC(sidt, "sidt Ms");
967 IEMOP_HLP_MIN_286();
968 IEMOP_HLP_64BIT_OP_SIZE();
969 IEM_MC_BEGIN(2, 1);
970 IEM_MC_ARG(uint8_t, iEffSeg, 0);
971 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
974 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
975 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
976 IEM_MC_END();
977 return VINF_SUCCESS;
978}
979
980
981/** Opcode 0x0f 0x01 /1. */
982FNIEMOP_DEF(iemOp_Grp7_monitor)
983{
984 IEMOP_MNEMONIC(monitor, "monitor");
985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
986 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
987}
988
989
990/** Opcode 0x0f 0x01 /1. */
991FNIEMOP_DEF(iemOp_Grp7_mwait)
992{
993 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
995 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
996}
997
998
999/** Opcode 0x0f 0x01 /2. */
1000FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
1001{
1002 IEMOP_MNEMONIC(lgdt, "lgdt");
1003 IEMOP_HLP_64BIT_OP_SIZE();
1004 IEM_MC_BEGIN(3, 1);
1005 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1006 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1007 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
1008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1010 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1011 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1012 IEM_MC_END();
1013 return VINF_SUCCESS;
1014}
1015
1016
1017/** Opcode 0x0f 0x01 0xd0. */
1018FNIEMOP_DEF(iemOp_Grp7_xgetbv)
1019{
1020 IEMOP_MNEMONIC(xgetbv, "xgetbv");
1021 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1022 {
1023 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1024 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
1025 }
1026 return IEMOP_RAISE_INVALID_OPCODE();
1027}
1028
1029
1030/** Opcode 0x0f 0x01 0xd1. */
1031FNIEMOP_DEF(iemOp_Grp7_xsetbv)
1032{
1033 IEMOP_MNEMONIC(xsetbv, "xsetbv");
1034 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
1035 {
1036 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1037 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
1038 }
1039 return IEMOP_RAISE_INVALID_OPCODE();
1040}
1041
1042
1043/** Opcode 0x0f 0x01 /3. */
1044FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
1045{
1046 IEMOP_MNEMONIC(lidt, "lidt");
1047 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
1048 ? IEMMODE_64BIT
1049 : pVCpu->iem.s.enmEffOpSize;
1050 IEM_MC_BEGIN(3, 1);
1051 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1052 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1053 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
1054 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1056 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1057 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1058 IEM_MC_END();
1059 return VINF_SUCCESS;
1060}
1061
1062
1063/** Opcode 0x0f 0x01 0xd8. */
1064FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1065
1066/** Opcode 0x0f 0x01 0xd9. */
1067FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
1068
1069/** Opcode 0x0f 0x01 0xda. */
1070FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1071
1072/** Opcode 0x0f 0x01 0xdb. */
1073FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1074
1075/** Opcode 0x0f 0x01 0xdc. */
1076FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1077
1078/** Opcode 0x0f 0x01 0xdd. */
1079FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1080
1081/** Opcode 0x0f 0x01 0xde. */
1082FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1083
1084/** Opcode 0x0f 0x01 0xdf. */
1085FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1086
1087/** Opcode 0x0f 0x01 /4. */
1088FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1089{
1090 IEMOP_MNEMONIC(smsw, "smsw");
1091 IEMOP_HLP_MIN_286();
1092 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1093 {
1094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1095 switch (pVCpu->iem.s.enmEffOpSize)
1096 {
1097 case IEMMODE_16BIT:
1098 IEM_MC_BEGIN(0, 1);
1099 IEM_MC_LOCAL(uint16_t, u16Tmp);
1100 IEM_MC_FETCH_CR0_U16(u16Tmp);
1101 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1102 { /* likely */ }
1103 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1104 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1105 else
1106 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1107 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1108 IEM_MC_ADVANCE_RIP();
1109 IEM_MC_END();
1110 return VINF_SUCCESS;
1111
1112 case IEMMODE_32BIT:
1113 IEM_MC_BEGIN(0, 1);
1114 IEM_MC_LOCAL(uint32_t, u32Tmp);
1115 IEM_MC_FETCH_CR0_U32(u32Tmp);
1116 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1117 IEM_MC_ADVANCE_RIP();
1118 IEM_MC_END();
1119 return VINF_SUCCESS;
1120
1121 case IEMMODE_64BIT:
1122 IEM_MC_BEGIN(0, 1);
1123 IEM_MC_LOCAL(uint64_t, u64Tmp);
1124 IEM_MC_FETCH_CR0_U64(u64Tmp);
1125 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1126 IEM_MC_ADVANCE_RIP();
1127 IEM_MC_END();
1128 return VINF_SUCCESS;
1129
1130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1131 }
1132 }
1133 else
1134 {
1135 /* Ignore operand size here, memory refs are always 16-bit. */
1136 IEM_MC_BEGIN(0, 2);
1137 IEM_MC_LOCAL(uint16_t, u16Tmp);
1138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1141 IEM_MC_FETCH_CR0_U16(u16Tmp);
1142 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1143 { /* likely */ }
1144 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1145 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1146 else
1147 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1148 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1149 IEM_MC_ADVANCE_RIP();
1150 IEM_MC_END();
1151 return VINF_SUCCESS;
1152 }
1153}
1154
1155
1156/** Opcode 0x0f 0x01 /6. */
1157FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1158{
1159 /* The operand size is effectively ignored, all is 16-bit and only the
1160 lower 3-bits are used. */
1161 IEMOP_MNEMONIC(lmsw, "lmsw");
1162 IEMOP_HLP_MIN_286();
1163 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1164 {
1165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1166 IEM_MC_BEGIN(1, 0);
1167 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1168 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1169 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1170 IEM_MC_END();
1171 }
1172 else
1173 {
1174 IEM_MC_BEGIN(1, 1);
1175 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1179 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1180 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1181 IEM_MC_END();
1182 }
1183 return VINF_SUCCESS;
1184}
1185
1186
1187/** Opcode 0x0f 0x01 /7. */
1188FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1189{
1190 IEMOP_MNEMONIC(invlpg, "invlpg");
1191 IEMOP_HLP_MIN_486();
1192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1193 IEM_MC_BEGIN(1, 1);
1194 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1196 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1197 IEM_MC_END();
1198 return VINF_SUCCESS;
1199}
1200
1201
1202/** Opcode 0x0f 0x01 /7. */
1203FNIEMOP_DEF(iemOp_Grp7_swapgs)
1204{
1205 IEMOP_MNEMONIC(swapgs, "swapgs");
1206 IEMOP_HLP_ONLY_64BIT();
1207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1208 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
1209}
1210
1211
1212/** Opcode 0x0f 0x01 /7. */
1213FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1214{
1215 NOREF(pVCpu);
1216 IEMOP_BITCH_ABOUT_STUB();
1217 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1218}
1219
1220
1221/** Opcode 0x0f 0x01. */
1222FNIEMOP_DEF(iemOp_Grp7)
1223{
1224 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1225 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1226 {
1227 case 0:
1228 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1229 return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
1230 switch (bRm & X86_MODRM_RM_MASK)
1231 {
1232 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1233 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1234 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1235 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1236 }
1237 return IEMOP_RAISE_INVALID_OPCODE();
1238
1239 case 1:
1240 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1241 return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
1242 switch (bRm & X86_MODRM_RM_MASK)
1243 {
1244 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1245 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1246 }
1247 return IEMOP_RAISE_INVALID_OPCODE();
1248
1249 case 2:
1250 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1251 return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
1252 switch (bRm & X86_MODRM_RM_MASK)
1253 {
1254 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1255 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1256 }
1257 return IEMOP_RAISE_INVALID_OPCODE();
1258
1259 case 3:
1260 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1261 return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
1262 switch (bRm & X86_MODRM_RM_MASK)
1263 {
1264 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1265 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1266 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1267 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1268 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1269 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1270 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1271 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1273 }
1274
1275 case 4:
1276 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1277
1278 case 5:
1279 return IEMOP_RAISE_INVALID_OPCODE();
1280
1281 case 6:
1282 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1283
1284 case 7:
1285 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1286 return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
1287 switch (bRm & X86_MODRM_RM_MASK)
1288 {
1289 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1290 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1291 }
1292 return IEMOP_RAISE_INVALID_OPCODE();
1293
1294 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1295 }
1296}
1297
1298/** Opcode 0x0f 0x00 /3. */
1299FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1300{
1301 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1303
1304 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1305 {
1306 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1307 switch (pVCpu->iem.s.enmEffOpSize)
1308 {
1309 case IEMMODE_16BIT:
1310 {
1311 IEM_MC_BEGIN(3, 0);
1312 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1313 IEM_MC_ARG(uint16_t, u16Sel, 1);
1314 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1315
1316 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1317 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1318 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1319
1320 IEM_MC_END();
1321 return VINF_SUCCESS;
1322 }
1323
1324 case IEMMODE_32BIT:
1325 case IEMMODE_64BIT:
1326 {
1327 IEM_MC_BEGIN(3, 0);
1328 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1329 IEM_MC_ARG(uint16_t, u16Sel, 1);
1330 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1331
1332 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1333 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1334 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1335
1336 IEM_MC_END();
1337 return VINF_SUCCESS;
1338 }
1339
1340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1341 }
1342 }
1343 else
1344 {
1345 switch (pVCpu->iem.s.enmEffOpSize)
1346 {
1347 case IEMMODE_16BIT:
1348 {
1349 IEM_MC_BEGIN(3, 1);
1350 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1351 IEM_MC_ARG(uint16_t, u16Sel, 1);
1352 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1354
1355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1356 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1357
1358 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1359 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1360 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1361
1362 IEM_MC_END();
1363 return VINF_SUCCESS;
1364 }
1365
1366 case IEMMODE_32BIT:
1367 case IEMMODE_64BIT:
1368 {
1369 IEM_MC_BEGIN(3, 1);
1370 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1371 IEM_MC_ARG(uint16_t, u16Sel, 1);
1372 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1374
1375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1376 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1377/** @todo testcase: make sure it's a 16-bit read. */
1378
1379 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1380 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1381 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1382
1383 IEM_MC_END();
1384 return VINF_SUCCESS;
1385 }
1386
1387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1388 }
1389 }
1390}
1391
1392
1393
1394/** Opcode 0x0f 0x02. */
1395FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1396{
1397 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1398 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1399}
1400
1401
1402/** Opcode 0x0f 0x03. */
1403FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1404{
1405 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1406 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1407}
1408
1409
1410/** Opcode 0x0f 0x05. */
1411FNIEMOP_DEF(iemOp_syscall)
1412{
1413 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1416}
1417
1418
1419/** Opcode 0x0f 0x06. */
1420FNIEMOP_DEF(iemOp_clts)
1421{
1422 IEMOP_MNEMONIC(clts, "clts");
1423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1424 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1425}
1426
1427
1428/** Opcode 0x0f 0x07. */
1429FNIEMOP_DEF(iemOp_sysret)
1430{
1431 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1433 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1434}
1435
1436
1437/** Opcode 0x0f 0x08. */
1438FNIEMOP_STUB(iemOp_invd);
1439// IEMOP_HLP_MIN_486();
1440
1441
1442/** Opcode 0x0f 0x09. */
1443FNIEMOP_DEF(iemOp_wbinvd)
1444{
1445 IEMOP_MNEMONIC(wbinvd, "wbinvd");
1446 IEMOP_HLP_MIN_486();
1447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1448 IEM_MC_BEGIN(0, 0);
1449 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
1450 IEM_MC_ADVANCE_RIP();
1451 IEM_MC_END();
1452 return VINF_SUCCESS; /* ignore for now */
1453}
1454
1455
1456/** Opcode 0x0f 0x0b. */
1457FNIEMOP_DEF(iemOp_ud2)
1458{
1459 IEMOP_MNEMONIC(ud2, "ud2");
1460 return IEMOP_RAISE_INVALID_OPCODE();
1461}
1462
1463/** Opcode 0x0f 0x0d. */
1464FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1465{
1466 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1467 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1468 {
1469 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1470 return IEMOP_RAISE_INVALID_OPCODE();
1471 }
1472
1473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1475 {
1476 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1477 return IEMOP_RAISE_INVALID_OPCODE();
1478 }
1479
1480 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1481 {
1482 case 2: /* Aliased to /0 for the time being. */
1483 case 4: /* Aliased to /0 for the time being. */
1484 case 5: /* Aliased to /0 for the time being. */
1485 case 6: /* Aliased to /0 for the time being. */
1486 case 7: /* Aliased to /0 for the time being. */
1487 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1488 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1489 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1491 }
1492
1493 IEM_MC_BEGIN(0, 1);
1494 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1495 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1497 /* Currently a NOP. */
1498 NOREF(GCPtrEffSrc);
1499 IEM_MC_ADVANCE_RIP();
1500 IEM_MC_END();
1501 return VINF_SUCCESS;
1502}
1503
1504
1505/** Opcode 0x0f 0x0e. */
1506FNIEMOP_STUB(iemOp_femms);
1507
1508
1509/** Opcode 0x0f 0x0f 0x0c. */
1510FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
1511
1512/** Opcode 0x0f 0x0f 0x0d. */
1513FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
1514
1515/** Opcode 0x0f 0x0f 0x1c. */
1516FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
1517
1518/** Opcode 0x0f 0x0f 0x1d. */
1519FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
1520
1521/** Opcode 0x0f 0x0f 0x8a. */
1522FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
1523
1524/** Opcode 0x0f 0x0f 0x8e. */
1525FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
1526
1527/** Opcode 0x0f 0x0f 0x90. */
1528FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
1529
1530/** Opcode 0x0f 0x0f 0x94. */
1531FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
1532
1533/** Opcode 0x0f 0x0f 0x96. */
1534FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
1535
1536/** Opcode 0x0f 0x0f 0x97. */
1537FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1538
1539/** Opcode 0x0f 0x0f 0x9a. */
1540FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
1541
1542/** Opcode 0x0f 0x0f 0x9e. */
1543FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
1544
1545/** Opcode 0x0f 0x0f 0xa0. */
1546FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1547
1548/** Opcode 0x0f 0x0f 0xa4. */
1549FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
1550
1551/** Opcode 0x0f 0x0f 0xa6. */
1552FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1553
1554/** Opcode 0x0f 0x0f 0xa7. */
1555FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1556
1557/** Opcode 0x0f 0x0f 0xaa. */
1558FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1559
1560/** Opcode 0x0f 0x0f 0xae. */
1561FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1562
1563/** Opcode 0x0f 0x0f 0xb0. */
1564FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1565
1566/** Opcode 0x0f 0x0f 0xb4. */
1567FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1568
1569/** Opcode 0x0f 0x0f 0xb6. */
1570FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1571
1572/** Opcode 0x0f 0x0f 0xb7. */
1573FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1574
1575/** Opcode 0x0f 0x0f 0xbb. */
1576FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1577
1578/** Opcode 0x0f 0x0f 0xbf. */
1579FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1580
1581
1582/** Opcode 0x0f 0x0f. */
1583FNIEMOP_DEF(iemOp_3Dnow)
1584{
1585 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1586 {
1587 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1588 return IEMOP_RAISE_INVALID_OPCODE();
1589 }
1590
1591 /* This is pretty sparse, use switch instead of table. */
1592 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1593 switch (b)
1594 {
1595 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
1596 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
1597 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
1598 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
1599 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
1600 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
1601 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
1602 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
1603 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
1604 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1605 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
1606 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
1607 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1608 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
1609 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1610 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1611 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
1612 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1613 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1614 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1615 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1616 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1617 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1618 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1619 default:
1620 return IEMOP_RAISE_INVALID_OPCODE();
1621 }
1622}
1623
1624
1625/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
1626FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
1627/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
1628FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
1629/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
1630FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
1631/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
1632FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1633
1634
1635/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
1636FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
1637{
1638 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1640 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1641 {
1642 /*
1643 * Register, register.
1644 */
1645 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1646 IEM_MC_BEGIN(0, 0);
1647 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1648 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1649 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1650 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1651 IEM_MC_ADVANCE_RIP();
1652 IEM_MC_END();
1653 }
1654 else
1655 {
1656 /*
1657 * Memory, register.
1658 */
1659 IEM_MC_BEGIN(0, 2);
1660 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1662
1663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1664 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1665 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1666 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1667
1668 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1669 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1670
1671 IEM_MC_ADVANCE_RIP();
1672 IEM_MC_END();
1673 }
1674 return VINF_SUCCESS;
1675}
1676
1677
1678/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
1679FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
1680
1681/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
1682FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1683
1684/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
1685FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
1686{
1687 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1688 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1689 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1690 {
1691 /*
1692 * Register, register.
1693 */
1694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1695 IEM_MC_BEGIN(0, 1);
1696 IEM_MC_LOCAL(uint64_t, uSrc);
1697
1698 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1699 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1700 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1701 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1702
1703 IEM_MC_ADVANCE_RIP();
1704 IEM_MC_END();
1705 }
1706 else
1707 {
1708 /*
1709 * Memory, register.
1710 */
1711 IEM_MC_BEGIN(0, 2);
1712 IEM_MC_LOCAL(uint64_t, uSrc);
1713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1714
1715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1717 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1718 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1719
1720 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1721 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1722
1723 IEM_MC_ADVANCE_RIP();
1724 IEM_MC_END();
1725 }
1726 return VINF_SUCCESS;
1727}


/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);

/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */


/** Opcode 0x0f 0x18. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x19..0x1f. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
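
/*
 * Note, added for illustration (not from the original source): 0F 19..1F /r
 * are the multi-byte NOPs; the operand is decoded but never accessed, which
 * is why only the effective address is calculated above.  The recommended
 * long NOP encodings all land here, e.g.:
 *      0F 1F 00                    nop dword [eax]         (3 bytes)
 *      66 0F 1F 44 00 00           nop word [eax+eax+0]    (6 bytes)
 *      0F 1F 84 00 00 00 00 00     nop dword [eax+eax+0]   (8 bytes)
 */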


/** Opcode 0x0f 0x20. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
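
/*
 * Encoding note, added for illustration (not from the original source): on
 * CPUs with fMovCr8In32Bit (AMD's alternate encoding), a LOCK prefix on
 * 0F 20 / 0F 22 redirects the CR0 reference to CR8, e.g.:
 *      F0 0F 20 C0         lock mov eax, cr0   ; reads CR8 instead
 * That is how iCrReg picks up the extra 8 above; on everything else LOCK
 * here is #UD.
 */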


/** Opcode 0x0f 0x21. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}


/** Opcode 0x0f 0x22. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as are operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x23. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}


/** Opcode 0x0f 0x24. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/* Opcode 0xf3 0x0f 0x28 - invalid */
/* Opcode 0xf2 0x0f 0x28 - invalid */

/** Opcode 0x0f 0x29. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    else
        IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT


/** Opcode 0x0f 0x2b. */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    else
        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
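
/*
 * Note, added for illustration (not from the original source): movntps and
 * movntpd are non-temporal store hints telling the CPU to bypass the cache
 * hierarchy.  The hint is not modelled here; a plain aligned 128-bit store is
 * done instead, which is a safe over-approximation since ordinary stores are
 * at least as strongly ordered as non-temporal ones.
 */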


/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */

/** Opcode 0x0f 0x30. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}


/** Opcode 0x0f 0x31. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}


/** Opcode 0x0f 0x32. */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}


/** Opcode 0x0f 0x33. */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */


/**
 * Implements a conditional move.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param   a_Cnd   The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
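
/*
 * Note, added for illustration (not from the original source): the
 * IEM_MC_ELSE() branches in the 32-bit cases above are not dead weight.  In
 * 64-bit mode a CMOVcc with a 32-bit operand always writes its destination:
 * when the condition is false the low 32 bits are left unchanged but bits
 * 63:32 are still zeroed, hence the IEM_MC_CLEAR_HIGH_GREG_U64 on the false
 * path.
 */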



/** Opcode 0x0f 0x40. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X

/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);

/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);

/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
 * memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint32_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
 * memory accessed for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

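/*
 * Worked example, added for illustration (not from the original source), of
 * the low unpack done by the workers above, for punpcklbw mm0, mm1:
 *      mm0 = 07 06 05 04 03 02 01 00   (bytes, most significant first)
 *      mm1 = 17 16 15 14 13 12 11 10
 *   => mm0 = 13 03 12 02 11 01 10 00
 * The low halves of destination and source are interleaved, destination byte
 * first; the SSE form does the same with the low quadwords of the XMM
 * registers.
 */
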
/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, W */
FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}

/* Opcode 0xf3 0x0f 0x60 - invalid */


/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
{
    IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}

/* Opcode 0xf3 0x0f 0x61 - invalid */


/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
{
    IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}

/* Opcode 0xf3 0x0f 0x62 - invalid */



/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */

/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
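
/*
 * Worked example, added for illustration (not from the original source), for
 * the high unpack, punpckhbw mm0, mm1, using the same inputs as the low
 * unpack example further up:
 *      mm0 = 07 06 05 04 03 02 01 00
 *      mm1 = 17 16 15 14 13 12 11 10
 *   => mm0 = 17 07 16 06 15 05 14 04
 * Only the upper halves are consumed, which is why the SSE memory form above
 * may fetch the full 128 bits but only needs the upper 64.
 */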


/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}


/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}


/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);


/* Opcode 0x0f 0x6c - invalid */

/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}

/* Opcode 0xf3 0x0f 0x6c - invalid */
/* Opcode 0xf2 0x0f 0x6c - invalid */


/** Opcode 0x0f 0x6d. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}


/** Opcode 0x0f 0x6e. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
            else
                IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
            else
                IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
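
/*
 * Encoding note, added for illustration (not from the original source):
 * REX.W is what turns movd into movq in the forms above, e.g. with the 66
 * prefix:
 *      66 0F 6E C0         movd xmm0, eax      (32-bit source)
 *      66 48 0F 6E C0      movq xmm0, rax      (64-bit source)
 * Either way the value lands zero extended in the full 128-bit register, as
 * the _ZX_U128 stores make explicit.
 */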


/** Opcode 0x0f 0x6f. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
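
/*
 * Note, added for illustration (not from the original source): the only
 * difference between the two SSE forms above is alignment.  movdqa (66
 * prefix) uses the _ALIGN_SSE fetch, which raises #GP(0) if the 128-bit
 * operand is not 16-byte aligned, while movdqu (F3 prefix) accepts any
 * address; the MMX form (no prefix) is a plain 64-bit move.
 */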


/** Opcode 0x0f 0x70. The immediate here is evil! */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
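
/*
 * Worked example, added for illustration (not from the original source), of
 * the "evil" immediate above: each 2-bit field selects a source element, so
 * result element i = source element ((bEvil >> (2 * i)) & 3).  E.g.:
 *      pshufw mm0, mm1, 0x1B   ; 00 01 10 11b - reverses the four words
 *      pshufd xmm0, xmm1, 0x00 ; broadcasts dword 0 to all four elements
 * Note that the imm8 counts as an instruction byte for effective address
 * purposes, hence the cbImm argument of 1 to IEM_MC_CALC_RM_EFF_ADDR in the
 * memory paths.
 */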
3320
3321
3322/** Opcode 0x0f 0x71 11/2. */
3323FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
3324
3325/** Opcode 0x66 0x0f 0x71 11/2. */
3326FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
3327
3328/** Opcode 0x0f 0x71 11/4. */
3329FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
3330
3331/** Opcode 0x66 0x0f 0x71 11/4. */
3332FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
3333
3334/** Opcode 0x0f 0x71 11/6. */
3335FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
3336
3337/** Opcode 0x66 0x0f 0x71 11/6. */
3338FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3339
3340
3341/** Opcode 0x0f 0x71. */
3342FNIEMOP_DEF(iemOp_Grp12)
3343{
3344 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3345 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3346 return IEMOP_RAISE_INVALID_OPCODE();
3347 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3348 {
3349 case 0: case 1: case 3: case 5: case 7:
3350 return IEMOP_RAISE_INVALID_OPCODE();
3351 case 2:
3352 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3353 {
3354 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3355 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3356 default: return IEMOP_RAISE_INVALID_OPCODE();
3357 }
3358 case 4:
3359 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3360 {
3361 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3362 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3363 default: return IEMOP_RAISE_INVALID_OPCODE();
3364 }
3365 case 6:
3366 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3367 {
3368 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3369 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3370 default: return IEMOP_RAISE_INVALID_OPCODE();
3371 }
3372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3373 }
3374}
3375
3376
3377/** Opcode 0x0f 0x72 11/2. */
3378FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3379
3380/** Opcode 0x66 0x0f 0x72 11/2. */
3381FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3382
3383/** Opcode 0x0f 0x72 11/4. */
3384FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3385
3386/** Opcode 0x66 0x0f 0x72 11/4. */
3387FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3388
3389/** Opcode 0x0f 0x72 11/6. */
3390FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3391
3392/** Opcode 0x66 0x0f 0x72 11/6. */
3393FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3394
3395
3396/** Opcode 0x0f 0x72. */
3397FNIEMOP_DEF(iemOp_Grp13)
3398{
3399 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3400 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3401 return IEMOP_RAISE_INVALID_OPCODE();
3402 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3403 {
3404 case 0: case 1: case 3: case 5: case 7:
3405 return IEMOP_RAISE_INVALID_OPCODE();
3406 case 2:
3407 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3408 {
3409 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3410 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3411 default: return IEMOP_RAISE_INVALID_OPCODE();
3412 }
3413 case 4:
3414 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3415 {
3416 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3417 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3418 default: return IEMOP_RAISE_INVALID_OPCODE();
3419 }
3420 case 6:
3421 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3422 {
3423 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3424 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3425 default: return IEMOP_RAISE_INVALID_OPCODE();
3426 }
3427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3428 }
3429}
3430
3431
3432/** Opcode 0x0f 0x73 11/2. */
3433FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3434
3435/** Opcode 0x66 0x0f 0x73 11/2. */
3436FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3437
3438/** Opcode 0x66 0x0f 0x73 11/3. */
3439FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3440
3441/** Opcode 0x0f 0x73 11/6. */
3442FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3443
3444/** Opcode 0x66 0x0f 0x73 11/6. */
3445FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3446
3447/** Opcode 0x66 0x0f 0x73 11/7. */
3448FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3449
3450
3451/** Opcode 0x0f 0x73. */
3452FNIEMOP_DEF(iemOp_Grp14)
3453{
3454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3455 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3456 return IEMOP_RAISE_INVALID_OPCODE();
3457 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3458 {
3459 case 0: case 1: case 4: case 5:
3460 return IEMOP_RAISE_INVALID_OPCODE();
3461 case 2:
3462 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3463 {
3464 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3465 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3466 default: return IEMOP_RAISE_INVALID_OPCODE();
3467 }
3468 case 3:
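        /* psrldq here (and pslldq under /7 below) exist only with the 66h
           prefix, i.e. on XMM registers; there is no MMX form of the byte shifts. */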
3469 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3470 {
3471 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3472 default: return IEMOP_RAISE_INVALID_OPCODE();
3473 }
3474 case 6:
3475 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3476 {
3477 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3478 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3479 default: return IEMOP_RAISE_INVALID_OPCODE();
3480 }
3481 case 7:
3482 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3483 {
3484 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3485 default: return IEMOP_RAISE_INVALID_OPCODE();
3486 }
3487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3488 }
3489}
3490
3491
3492/**
3493 * Common worker for SSE2 and MMX instructions on the forms:
3494 * pxxx mm1, mm2/mem64
3495 * pxxx xmm1, xmm2/mem128
3496 *
3497 * Proper alignment of the 128-bit operand is enforced.
3498 * Exceptions type 4. SSE2 and MMX cpuid checks.
3499 */
3500FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3501{
3502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3503 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3504 {
3505 case IEM_OP_PRF_SIZE_OP: /* SSE */
3506 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3507 {
3508 /*
3509 * Register, register.
3510 */
3511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3512 IEM_MC_BEGIN(2, 0);
3513 IEM_MC_ARG(uint128_t *, pDst, 0);
3514 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3515 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3516 IEM_MC_PREPARE_SSE_USAGE();
3517 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3518 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3519 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3520 IEM_MC_ADVANCE_RIP();
3521 IEM_MC_END();
3522 }
3523 else
3524 {
3525 /*
3526 * Register, memory.
3527 */
3528 IEM_MC_BEGIN(2, 2);
3529 IEM_MC_ARG(uint128_t *, pDst, 0);
3530 IEM_MC_LOCAL(uint128_t, uSrc);
3531 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3533
3534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3537 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
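                /* As the name suggests, the _ALIGN_SSE fetch enforces the 16-byte
                   alignment promised in the function docs, raising #GP(0) for a
                   misaligned 128-bit operand. */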
3538
3539 IEM_MC_PREPARE_SSE_USAGE();
3540 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3541 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3542
3543 IEM_MC_ADVANCE_RIP();
3544 IEM_MC_END();
3545 }
3546 return VINF_SUCCESS;
3547
3548 case 0: /* MMX */
3549 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3550 {
3551 /*
3552 * Register, register.
3553 */
3554 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3555 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3557 IEM_MC_BEGIN(2, 0);
3558 IEM_MC_ARG(uint64_t *, pDst, 0);
3559 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3560 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3561 IEM_MC_PREPARE_FPU_USAGE();
3562 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3563 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3564 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3565 IEM_MC_ADVANCE_RIP();
3566 IEM_MC_END();
3567 }
3568 else
3569 {
3570 /*
3571 * Register, memory.
3572 */
3573 IEM_MC_BEGIN(2, 2);
3574 IEM_MC_ARG(uint64_t *, pDst, 0);
3575 IEM_MC_LOCAL(uint64_t, uSrc);
3576 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3578
3579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3581 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3582 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3583
3584 IEM_MC_PREPARE_FPU_USAGE();
3585 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3586 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3587
3588 IEM_MC_ADVANCE_RIP();
3589 IEM_MC_END();
3590 }
3591 return VINF_SUCCESS;
3592
3593 default:
3594 return IEMOP_RAISE_INVALID_OPCODE();
3595 }
3596}
3597
3598
3599/** Opcode 0x0f 0x74. */
3600FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3601{
3602 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3603 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3604}
3605
3606
3607/** Opcode 0x0f 0x75. */
3608FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3609{
3610 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3611 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3612}
3613
3614
3615/** Opcode 0x0f 0x76. */
3616FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
3617{
3618 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3619 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3620}
3621
3622
3623/** Opcode 0x0f 0x77 - emms, vzeroupper, vzeroall */
3624FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
3625/* Opcode 0x66 0x0f 0x77 - invalid */
3626/* Opcode 0xf3 0x0f 0x77 - invalid */
3627/* Opcode 0xf2 0x0f 0x77 - invalid */
3628
3629/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
3630FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
3631/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
3632FNIEMOP_STUB(iemOp_AmdGrp17);
3633/* Opcode 0xf3 0x0f 0x78 - invalid */
3634/* Opcode 0xf2 0x0f 0x78 - invalid */
3635
3636/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
3637FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
3638/* Opcode 0x66 0x0f 0x79 - invalid */
3639/* Opcode 0xf3 0x0f 0x79 - invalid */
3640/* Opcode 0xf2 0x0f 0x79 - invalid */
3641
3642/* Opcode 0x0f 0x7a - invalid */
3643/* Opcode 0x66 0x0f 0x7a - invalid */
3644/* Opcode 0xf3 0x0f 0x7a - invalid */
3645/* Opcode 0xf2 0x0f 0x7a - invalid */
3646
3647/* Opcode 0x0f 0x7b - invalid */
3648/* Opcode 0x66 0x0f 0x7b - invalid */
3649/* Opcode 0xf3 0x0f 0x7b - invalid */
3650/* Opcode 0xf2 0x0f 0x7b - invalid */
3651
3652/* Opcode 0x0f 0x7c - invalid */
3653/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
3654FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3655/* Opcode 0xf3 0x0f 0x7c - invalid */
3656/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
3657FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3658
3659/* Opcode 0x0f 0x7d - invalid */
3660/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
3661FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3662/* Opcode 0xf3 0x0f 0x7d - invalid */
3663/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
3664FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3665
3666
3667/** Opcode 0x0f 0x7e. */
3668FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3669{
3670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3671 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3672 {
3673 case IEM_OP_PRF_SIZE_OP: /* SSE */
3674 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3675 IEMOP_MNEMONIC(movq_Eq_Vq, "movq Eq,Vq");
3676 else
3677 IEMOP_MNEMONIC(movd_Ed_Vd, "movd Ed,Vd");
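            /* Decoding sketch: 66 0F 7E /r is the movd form (e.g. 66 0F 7E C0 =
               movd eax,xmm0), while adding REX.W selects the 64-bit movq form
               (66 48 0F 7E C0 = movq rax,xmm0). */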
3678 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3679 {
3680 /* greg, XMM */
3681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3682 IEM_MC_BEGIN(0, 1);
3683 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3684 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3685 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3686 {
3687 IEM_MC_LOCAL(uint64_t, u64Tmp);
3688 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3689 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3690 }
3691 else
3692 {
3693 IEM_MC_LOCAL(uint32_t, u32Tmp);
3694 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3695 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3696 }
3697 IEM_MC_ADVANCE_RIP();
3698 IEM_MC_END();
3699 }
3700 else
3701 {
3702 /* [mem], XMM */
3703 IEM_MC_BEGIN(0, 2);
3704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3705 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3708 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3709 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3710 {
3711 IEM_MC_LOCAL(uint64_t, u64Tmp);
3712 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3713 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3714 }
3715 else
3716 {
3717 IEM_MC_LOCAL(uint32_t, u32Tmp);
3718 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3719 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3720 }
3721 IEM_MC_ADVANCE_RIP();
3722 IEM_MC_END();
3723 }
3724 return VINF_SUCCESS;
3725
3726 case 0: /* MMX */
3727 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3728 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3729 else
3730 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3731 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3732 {
3733 /* greg, MMX */
3734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3735 IEM_MC_BEGIN(0, 1);
3736 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3737 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3738 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3739 {
3740 IEM_MC_LOCAL(uint64_t, u64Tmp);
3741 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3742 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3743 }
3744 else
3745 {
3746 IEM_MC_LOCAL(uint32_t, u32Tmp);
3747 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3748 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3749 }
3750 IEM_MC_ADVANCE_RIP();
3751 IEM_MC_END();
3752 }
3753 else
3754 {
3755 /* [mem], MMX */
3756 IEM_MC_BEGIN(0, 2);
3757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3758 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3761 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3762 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3763 {
3764 IEM_MC_LOCAL(uint64_t, u64Tmp);
3765 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3766 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3767 }
3768 else
3769 {
3770 IEM_MC_LOCAL(uint32_t, u32Tmp);
3771 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3772 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3773 }
3774 IEM_MC_ADVANCE_RIP();
3775 IEM_MC_END();
3776 }
3777 return VINF_SUCCESS;
3778
3779 default:
3780 return IEMOP_RAISE_INVALID_OPCODE();
3781 }
3782}
3783
3784
3785/** Opcode 0x0f 0x7f. */
3786FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3787{
3788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3789 bool fAligned = false;
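    /* The 66h (movdqa) and F3h (movdqu) encodings share the code below;
       fAligned records which variant was decoded so the stores can enforce
       alignment only for movdqa. */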
3790 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3791 {
3792 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3793 fAligned = true;
3794 /* fall thru */
3795 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3796 if (fAligned)
3797 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3798 else
3799 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3800 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3801 {
3802 /*
3803 * Register, register.
3804 */
3805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3806 IEM_MC_BEGIN(0, 0);
3807 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3808 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3809 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3810 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3811 IEM_MC_ADVANCE_RIP();
3812 IEM_MC_END();
3813 }
3814 else
3815 {
3816 /*
3817 * Register, memory.
3818 */
3819 IEM_MC_BEGIN(0, 2);
3820 IEM_MC_LOCAL(uint128_t, u128Tmp);
3821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3822
3823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3825 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3826 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3827
3828 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3829 if (fAligned)
3830 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3831 else
3832 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3833
3834 IEM_MC_ADVANCE_RIP();
3835 IEM_MC_END();
3836 }
3837 return VINF_SUCCESS;
3838
3839 case 0: /* MMX */
3840 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3841
3842 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3843 {
3844 /*
3845 * Register, register.
3846 */
3847 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3848 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3850 IEM_MC_BEGIN(0, 1);
3851 IEM_MC_LOCAL(uint64_t, u64Tmp);
3852 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3853 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3854 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3855 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3856 IEM_MC_ADVANCE_RIP();
3857 IEM_MC_END();
3858 }
3859 else
3860 {
3861 /*
3862 * Register, memory.
3863 */
3864 IEM_MC_BEGIN(0, 2);
3865 IEM_MC_LOCAL(uint64_t, u64Tmp);
3866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3867
3868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3870 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3871 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3872
3873 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3874 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3875
3876 IEM_MC_ADVANCE_RIP();
3877 IEM_MC_END();
3878 }
3879 return VINF_SUCCESS;
3880
3881 default:
3882 return IEMOP_RAISE_INVALID_OPCODE();
3883 }
3884}
3885
3886
3887
3888/** Opcode 0x0f 0x80. */
3889FNIEMOP_DEF(iemOp_jo_Jv)
3890{
3891 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3892 IEMOP_HLP_MIN_386();
3893 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
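    /* In 64-bit mode Jcc defaults to a 64-bit operand size, so the 32-bit
       displacement branch below is taken and the displacement sign-extended
       when added to RIP. */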
3894 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3895 {
3896 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3898
3899 IEM_MC_BEGIN(0, 0);
3900 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3901 IEM_MC_REL_JMP_S16(i16Imm);
3902 } IEM_MC_ELSE() {
3903 IEM_MC_ADVANCE_RIP();
3904 } IEM_MC_ENDIF();
3905 IEM_MC_END();
3906 }
3907 else
3908 {
3909 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3911
3912 IEM_MC_BEGIN(0, 0);
3913 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3914 IEM_MC_REL_JMP_S32(i32Imm);
3915 } IEM_MC_ELSE() {
3916 IEM_MC_ADVANCE_RIP();
3917 } IEM_MC_ENDIF();
3918 IEM_MC_END();
3919 }
3920 return VINF_SUCCESS;
3921}
3922
3923
3924/** Opcode 0x0f 0x81. */
3925FNIEMOP_DEF(iemOp_jno_Jv)
3926{
3927 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3928 IEMOP_HLP_MIN_386();
3929 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3930 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3931 {
3932 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3934
3935 IEM_MC_BEGIN(0, 0);
3936 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3937 IEM_MC_ADVANCE_RIP();
3938 } IEM_MC_ELSE() {
3939 IEM_MC_REL_JMP_S16(i16Imm);
3940 } IEM_MC_ENDIF();
3941 IEM_MC_END();
3942 }
3943 else
3944 {
3945 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3947
3948 IEM_MC_BEGIN(0, 0);
3949 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3950 IEM_MC_ADVANCE_RIP();
3951 } IEM_MC_ELSE() {
3952 IEM_MC_REL_JMP_S32(i32Imm);
3953 } IEM_MC_ENDIF();
3954 IEM_MC_END();
3955 }
3956 return VINF_SUCCESS;
3957}
3958
3959
3960/** Opcode 0x0f 0x82. */
3961FNIEMOP_DEF(iemOp_jc_Jv)
3962{
3963 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3964 IEMOP_HLP_MIN_386();
3965 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3966 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3967 {
3968 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3970
3971 IEM_MC_BEGIN(0, 0);
3972 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3973 IEM_MC_REL_JMP_S16(i16Imm);
3974 } IEM_MC_ELSE() {
3975 IEM_MC_ADVANCE_RIP();
3976 } IEM_MC_ENDIF();
3977 IEM_MC_END();
3978 }
3979 else
3980 {
3981 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3983
3984 IEM_MC_BEGIN(0, 0);
3985 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3986 IEM_MC_REL_JMP_S32(i32Imm);
3987 } IEM_MC_ELSE() {
3988 IEM_MC_ADVANCE_RIP();
3989 } IEM_MC_ENDIF();
3990 IEM_MC_END();
3991 }
3992 return VINF_SUCCESS;
3993}
3994
3995
3996/** Opcode 0x0f 0x83. */
3997FNIEMOP_DEF(iemOp_jnc_Jv)
3998{
3999 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
4000 IEMOP_HLP_MIN_386();
4001 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4002 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4003 {
4004 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4006
4007 IEM_MC_BEGIN(0, 0);
4008 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4009 IEM_MC_ADVANCE_RIP();
4010 } IEM_MC_ELSE() {
4011 IEM_MC_REL_JMP_S16(i16Imm);
4012 } IEM_MC_ENDIF();
4013 IEM_MC_END();
4014 }
4015 else
4016 {
4017 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4019
4020 IEM_MC_BEGIN(0, 0);
4021 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4022 IEM_MC_ADVANCE_RIP();
4023 } IEM_MC_ELSE() {
4024 IEM_MC_REL_JMP_S32(i32Imm);
4025 } IEM_MC_ENDIF();
4026 IEM_MC_END();
4027 }
4028 return VINF_SUCCESS;
4029}
4030
4031
4032/** Opcode 0x0f 0x84. */
4033FNIEMOP_DEF(iemOp_je_Jv)
4034{
4035 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
4036 IEMOP_HLP_MIN_386();
4037 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4038 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4039 {
4040 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4042
4043 IEM_MC_BEGIN(0, 0);
4044 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4045 IEM_MC_REL_JMP_S16(i16Imm);
4046 } IEM_MC_ELSE() {
4047 IEM_MC_ADVANCE_RIP();
4048 } IEM_MC_ENDIF();
4049 IEM_MC_END();
4050 }
4051 else
4052 {
4053 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4055
4056 IEM_MC_BEGIN(0, 0);
4057 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4058 IEM_MC_REL_JMP_S32(i32Imm);
4059 } IEM_MC_ELSE() {
4060 IEM_MC_ADVANCE_RIP();
4061 } IEM_MC_ENDIF();
4062 IEM_MC_END();
4063 }
4064 return VINF_SUCCESS;
4065}
4066
4067
4068/** Opcode 0x0f 0x85. */
4069FNIEMOP_DEF(iemOp_jne_Jv)
4070{
4071 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
4072 IEMOP_HLP_MIN_386();
4073 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4074 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4075 {
4076 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4078
4079 IEM_MC_BEGIN(0, 0);
4080 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4081 IEM_MC_ADVANCE_RIP();
4082 } IEM_MC_ELSE() {
4083 IEM_MC_REL_JMP_S16(i16Imm);
4084 } IEM_MC_ENDIF();
4085 IEM_MC_END();
4086 }
4087 else
4088 {
4089 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4091
4092 IEM_MC_BEGIN(0, 0);
4093 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4094 IEM_MC_ADVANCE_RIP();
4095 } IEM_MC_ELSE() {
4096 IEM_MC_REL_JMP_S32(i32Imm);
4097 } IEM_MC_ENDIF();
4098 IEM_MC_END();
4099 }
4100 return VINF_SUCCESS;
4101}
4102
4103
4104/** Opcode 0x0f 0x86. */
4105FNIEMOP_DEF(iemOp_jbe_Jv)
4106{
4107 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
4108 IEMOP_HLP_MIN_386();
4109 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4110 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4111 {
4112 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4114
4115 IEM_MC_BEGIN(0, 0);
4116 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4117 IEM_MC_REL_JMP_S16(i16Imm);
4118 } IEM_MC_ELSE() {
4119 IEM_MC_ADVANCE_RIP();
4120 } IEM_MC_ENDIF();
4121 IEM_MC_END();
4122 }
4123 else
4124 {
4125 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4127
4128 IEM_MC_BEGIN(0, 0);
4129 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4130 IEM_MC_REL_JMP_S32(i32Imm);
4131 } IEM_MC_ELSE() {
4132 IEM_MC_ADVANCE_RIP();
4133 } IEM_MC_ENDIF();
4134 IEM_MC_END();
4135 }
4136 return VINF_SUCCESS;
4137}
4138
4139
4140/** Opcode 0x0f 0x87. */
4141FNIEMOP_DEF(iemOp_jnbe_Jv)
4142{
4143 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
4144 IEMOP_HLP_MIN_386();
4145 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4146 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4147 {
4148 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4150
4151 IEM_MC_BEGIN(0, 0);
4152 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4153 IEM_MC_ADVANCE_RIP();
4154 } IEM_MC_ELSE() {
4155 IEM_MC_REL_JMP_S16(i16Imm);
4156 } IEM_MC_ENDIF();
4157 IEM_MC_END();
4158 }
4159 else
4160 {
4161 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4163
4164 IEM_MC_BEGIN(0, 0);
4165 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4166 IEM_MC_ADVANCE_RIP();
4167 } IEM_MC_ELSE() {
4168 IEM_MC_REL_JMP_S32(i32Imm);
4169 } IEM_MC_ENDIF();
4170 IEM_MC_END();
4171 }
4172 return VINF_SUCCESS;
4173}
4174
4175
4176/** Opcode 0x0f 0x88. */
4177FNIEMOP_DEF(iemOp_js_Jv)
4178{
4179 IEMOP_MNEMONIC(js_Jv, "js Jv");
4180 IEMOP_HLP_MIN_386();
4181 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4182 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4183 {
4184 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4186
4187 IEM_MC_BEGIN(0, 0);
4188 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4189 IEM_MC_REL_JMP_S16(i16Imm);
4190 } IEM_MC_ELSE() {
4191 IEM_MC_ADVANCE_RIP();
4192 } IEM_MC_ENDIF();
4193 IEM_MC_END();
4194 }
4195 else
4196 {
4197 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4199
4200 IEM_MC_BEGIN(0, 0);
4201 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4202 IEM_MC_REL_JMP_S32(i32Imm);
4203 } IEM_MC_ELSE() {
4204 IEM_MC_ADVANCE_RIP();
4205 } IEM_MC_ENDIF();
4206 IEM_MC_END();
4207 }
4208 return VINF_SUCCESS;
4209}
4210
4211
4212/** Opcode 0x0f 0x89. */
4213FNIEMOP_DEF(iemOp_jns_Jv)
4214{
4215 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
4216 IEMOP_HLP_MIN_386();
4217 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4218 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4219 {
4220 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4222
4223 IEM_MC_BEGIN(0, 0);
4224 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4225 IEM_MC_ADVANCE_RIP();
4226 } IEM_MC_ELSE() {
4227 IEM_MC_REL_JMP_S16(i16Imm);
4228 } IEM_MC_ENDIF();
4229 IEM_MC_END();
4230 }
4231 else
4232 {
4233 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4235
4236 IEM_MC_BEGIN(0, 0);
4237 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4238 IEM_MC_ADVANCE_RIP();
4239 } IEM_MC_ELSE() {
4240 IEM_MC_REL_JMP_S32(i32Imm);
4241 } IEM_MC_ENDIF();
4242 IEM_MC_END();
4243 }
4244 return VINF_SUCCESS;
4245}
4246
4247
4248/** Opcode 0x0f 0x8a. */
4249FNIEMOP_DEF(iemOp_jp_Jv)
4250{
4251 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
4252 IEMOP_HLP_MIN_386();
4253 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4254 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4255 {
4256 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4258
4259 IEM_MC_BEGIN(0, 0);
4260 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4261 IEM_MC_REL_JMP_S16(i16Imm);
4262 } IEM_MC_ELSE() {
4263 IEM_MC_ADVANCE_RIP();
4264 } IEM_MC_ENDIF();
4265 IEM_MC_END();
4266 }
4267 else
4268 {
4269 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4271
4272 IEM_MC_BEGIN(0, 0);
4273 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4274 IEM_MC_REL_JMP_S32(i32Imm);
4275 } IEM_MC_ELSE() {
4276 IEM_MC_ADVANCE_RIP();
4277 } IEM_MC_ENDIF();
4278 IEM_MC_END();
4279 }
4280 return VINF_SUCCESS;
4281}
4282
4283
4284/** Opcode 0x0f 0x8b. */
4285FNIEMOP_DEF(iemOp_jnp_Jv)
4286{
4287 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
4288 IEMOP_HLP_MIN_386();
4289 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4290 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4291 {
4292 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4294
4295 IEM_MC_BEGIN(0, 0);
4296 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4297 IEM_MC_ADVANCE_RIP();
4298 } IEM_MC_ELSE() {
4299 IEM_MC_REL_JMP_S16(i16Imm);
4300 } IEM_MC_ENDIF();
4301 IEM_MC_END();
4302 }
4303 else
4304 {
4305 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4307
4308 IEM_MC_BEGIN(0, 0);
4309 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4310 IEM_MC_ADVANCE_RIP();
4311 } IEM_MC_ELSE() {
4312 IEM_MC_REL_JMP_S32(i32Imm);
4313 } IEM_MC_ENDIF();
4314 IEM_MC_END();
4315 }
4316 return VINF_SUCCESS;
4317}
4318
4319
4320/** Opcode 0x0f 0x8c. */
4321FNIEMOP_DEF(iemOp_jl_Jv)
4322{
4323 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
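    /* Signed less-than: the branch is taken when SF != OF. */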
4324 IEMOP_HLP_MIN_386();
4325 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4326 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4327 {
4328 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4330
4331 IEM_MC_BEGIN(0, 0);
4332 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4333 IEM_MC_REL_JMP_S16(i16Imm);
4334 } IEM_MC_ELSE() {
4335 IEM_MC_ADVANCE_RIP();
4336 } IEM_MC_ENDIF();
4337 IEM_MC_END();
4338 }
4339 else
4340 {
4341 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4343
4344 IEM_MC_BEGIN(0, 0);
4345 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4346 IEM_MC_REL_JMP_S32(i32Imm);
4347 } IEM_MC_ELSE() {
4348 IEM_MC_ADVANCE_RIP();
4349 } IEM_MC_ENDIF();
4350 IEM_MC_END();
4351 }
4352 return VINF_SUCCESS;
4353}
4354
4355
4356/** Opcode 0x0f 0x8d. */
4357FNIEMOP_DEF(iemOp_jnl_Jv)
4358{
4359 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
4360 IEMOP_HLP_MIN_386();
4361 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4362 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4363 {
4364 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4366
4367 IEM_MC_BEGIN(0, 0);
4368 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4369 IEM_MC_ADVANCE_RIP();
4370 } IEM_MC_ELSE() {
4371 IEM_MC_REL_JMP_S16(i16Imm);
4372 } IEM_MC_ENDIF();
4373 IEM_MC_END();
4374 }
4375 else
4376 {
4377 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4379
4380 IEM_MC_BEGIN(0, 0);
4381 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4382 IEM_MC_ADVANCE_RIP();
4383 } IEM_MC_ELSE() {
4384 IEM_MC_REL_JMP_S32(i32Imm);
4385 } IEM_MC_ENDIF();
4386 IEM_MC_END();
4387 }
4388 return VINF_SUCCESS;
4389}
4390
4391
4392/** Opcode 0x0f 0x8e. */
4393FNIEMOP_DEF(iemOp_jle_Jv)
4394{
4395 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
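    /* Signed less-or-equal: taken when ZF is set or SF != OF. */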
4396 IEMOP_HLP_MIN_386();
4397 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4398 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4399 {
4400 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4402
4403 IEM_MC_BEGIN(0, 0);
4404 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4405 IEM_MC_REL_JMP_S16(i16Imm);
4406 } IEM_MC_ELSE() {
4407 IEM_MC_ADVANCE_RIP();
4408 } IEM_MC_ENDIF();
4409 IEM_MC_END();
4410 }
4411 else
4412 {
4413 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4415
4416 IEM_MC_BEGIN(0, 0);
4417 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4418 IEM_MC_REL_JMP_S32(i32Imm);
4419 } IEM_MC_ELSE() {
4420 IEM_MC_ADVANCE_RIP();
4421 } IEM_MC_ENDIF();
4422 IEM_MC_END();
4423 }
4424 return VINF_SUCCESS;
4425}
4426
4427
4428/** Opcode 0x0f 0x8f. */
4429FNIEMOP_DEF(iemOp_jnle_Jv)
4430{
4431 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4432 IEMOP_HLP_MIN_386();
4433 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4434 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4435 {
4436 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4438
4439 IEM_MC_BEGIN(0, 0);
4440 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4441 IEM_MC_ADVANCE_RIP();
4442 } IEM_MC_ELSE() {
4443 IEM_MC_REL_JMP_S16(i16Imm);
4444 } IEM_MC_ENDIF();
4445 IEM_MC_END();
4446 }
4447 else
4448 {
4449 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4451
4452 IEM_MC_BEGIN(0, 0);
4453 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4454 IEM_MC_ADVANCE_RIP();
4455 } IEM_MC_ELSE() {
4456 IEM_MC_REL_JMP_S32(i32Imm);
4457 } IEM_MC_ENDIF();
4458 IEM_MC_END();
4459 }
4460 return VINF_SUCCESS;
4461}
4462
4463
4464/** Opcode 0x0f 0x90. */
4465FNIEMOP_DEF(iemOp_seto_Eb)
4466{
4467 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4468 IEMOP_HLP_MIN_386();
4469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4470
4471 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4472 * any way. AMD says it's "unused", whatever that means. We're
4473 * ignoring it for now. */
4474 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4475 {
4476 /* register target */
4477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4478 IEM_MC_BEGIN(0, 0);
4479 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4480 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4481 } IEM_MC_ELSE() {
4482 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4483 } IEM_MC_ENDIF();
4484 IEM_MC_ADVANCE_RIP();
4485 IEM_MC_END();
4486 }
4487 else
4488 {
4489 /* memory target */
4490 IEM_MC_BEGIN(0, 1);
4491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4494 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4495 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4496 } IEM_MC_ELSE() {
4497 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4498 } IEM_MC_ENDIF();
4499 IEM_MC_ADVANCE_RIP();
4500 IEM_MC_END();
4501 }
4502 return VINF_SUCCESS;
4503}
4504
4505
4506/** Opcode 0x0f 0x91. */
4507FNIEMOP_DEF(iemOp_setno_Eb)
4508{
4509 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4510 IEMOP_HLP_MIN_386();
4511 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4512
4513 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4514 * any way. AMD says it's "unused", whatever that means. We're
4515 * ignoring it for now. */
4516 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4517 {
4518 /* register target */
4519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4520 IEM_MC_BEGIN(0, 0);
4521 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4522 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4523 } IEM_MC_ELSE() {
4524 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4525 } IEM_MC_ENDIF();
4526 IEM_MC_ADVANCE_RIP();
4527 IEM_MC_END();
4528 }
4529 else
4530 {
4531 /* memory target */
4532 IEM_MC_BEGIN(0, 1);
4533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4536 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4537 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4538 } IEM_MC_ELSE() {
4539 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4540 } IEM_MC_ENDIF();
4541 IEM_MC_ADVANCE_RIP();
4542 IEM_MC_END();
4543 }
4544 return VINF_SUCCESS;
4545}
4546
4547
4548/** Opcode 0x0f 0x92. */
4549FNIEMOP_DEF(iemOp_setc_Eb)
4550{
4551 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4552 IEMOP_HLP_MIN_386();
4553 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4554
4555 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4556 * any way. AMD says it's "unused", whatever that means. We're
4557 * ignoring it for now. */
4558 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4559 {
4560 /* register target */
4561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4562 IEM_MC_BEGIN(0, 0);
4563 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4564 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4565 } IEM_MC_ELSE() {
4566 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4567 } IEM_MC_ENDIF();
4568 IEM_MC_ADVANCE_RIP();
4569 IEM_MC_END();
4570 }
4571 else
4572 {
4573 /* memory target */
4574 IEM_MC_BEGIN(0, 1);
4575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4578 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4579 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4580 } IEM_MC_ELSE() {
4581 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4582 } IEM_MC_ENDIF();
4583 IEM_MC_ADVANCE_RIP();
4584 IEM_MC_END();
4585 }
4586 return VINF_SUCCESS;
4587}
4588
4589
4590/** Opcode 0x0f 0x93. */
4591FNIEMOP_DEF(iemOp_setnc_Eb)
4592{
4593 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4594 IEMOP_HLP_MIN_386();
4595 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4596
4597 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4598 * any way. AMD says it's "unused", whatever that means. We're
4599 * ignoring it for now. */
4600 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4601 {
4602 /* register target */
4603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4604 IEM_MC_BEGIN(0, 0);
4605 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4606 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4607 } IEM_MC_ELSE() {
4608 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4609 } IEM_MC_ENDIF();
4610 IEM_MC_ADVANCE_RIP();
4611 IEM_MC_END();
4612 }
4613 else
4614 {
4615 /* memory target */
4616 IEM_MC_BEGIN(0, 1);
4617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4620 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4621 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4622 } IEM_MC_ELSE() {
4623 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4624 } IEM_MC_ENDIF();
4625 IEM_MC_ADVANCE_RIP();
4626 IEM_MC_END();
4627 }
4628 return VINF_SUCCESS;
4629}
4630
4631
4632/** Opcode 0x0f 0x94. */
4633FNIEMOP_DEF(iemOp_sete_Eb)
4634{
4635 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4636 IEMOP_HLP_MIN_386();
4637 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4638
4639 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4640 * any way. AMD says it's "unused", whatever that means. We're
4641 * ignoring it for now. */
4642 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4643 {
4644 /* register target */
4645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4646 IEM_MC_BEGIN(0, 0);
4647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4648 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4649 } IEM_MC_ELSE() {
4650 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4651 } IEM_MC_ENDIF();
4652 IEM_MC_ADVANCE_RIP();
4653 IEM_MC_END();
4654 }
4655 else
4656 {
4657 /* memory target */
4658 IEM_MC_BEGIN(0, 1);
4659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4662 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4663 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4664 } IEM_MC_ELSE() {
4665 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4666 } IEM_MC_ENDIF();
4667 IEM_MC_ADVANCE_RIP();
4668 IEM_MC_END();
4669 }
4670 return VINF_SUCCESS;
4671}
4672
4673
4674/** Opcode 0x0f 0x95. */
4675FNIEMOP_DEF(iemOp_setne_Eb)
4676{
4677 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4678 IEMOP_HLP_MIN_386();
4679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4680
4681 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4682 * any way. AMD says it's "unused", whatever that means. We're
4683 * ignoring it for now. */
4684 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4685 {
4686 /* register target */
4687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4688 IEM_MC_BEGIN(0, 0);
4689 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4690 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4691 } IEM_MC_ELSE() {
4692 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4693 } IEM_MC_ENDIF();
4694 IEM_MC_ADVANCE_RIP();
4695 IEM_MC_END();
4696 }
4697 else
4698 {
4699 /* memory target */
4700 IEM_MC_BEGIN(0, 1);
4701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4704 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4705 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4706 } IEM_MC_ELSE() {
4707 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4708 } IEM_MC_ENDIF();
4709 IEM_MC_ADVANCE_RIP();
4710 IEM_MC_END();
4711 }
4712 return VINF_SUCCESS;
4713}
4714
4715
4716/** Opcode 0x0f 0x96. */
4717FNIEMOP_DEF(iemOp_setbe_Eb)
4718{
4719 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4720 IEMOP_HLP_MIN_386();
4721 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4722
4723 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4724 * any way. AMD says it's "unused", whatever that means. We're
4725 * ignoring it for now. */
4726 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4727 {
4728 /* register target */
4729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4730 IEM_MC_BEGIN(0, 0);
4731 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4732 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4733 } IEM_MC_ELSE() {
4734 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4735 } IEM_MC_ENDIF();
4736 IEM_MC_ADVANCE_RIP();
4737 IEM_MC_END();
4738 }
4739 else
4740 {
4741 /* memory target */
4742 IEM_MC_BEGIN(0, 1);
4743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4746 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4747 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4748 } IEM_MC_ELSE() {
4749 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4750 } IEM_MC_ENDIF();
4751 IEM_MC_ADVANCE_RIP();
4752 IEM_MC_END();
4753 }
4754 return VINF_SUCCESS;
4755}
4756
4757
4758/** Opcode 0x0f 0x97. */
4759FNIEMOP_DEF(iemOp_setnbe_Eb)
4760{
4761 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4762 IEMOP_HLP_MIN_386();
4763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4764
4765 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4766 * any way. AMD says it's "unused", whatever that means. We're
4767 * ignoring it for now. */
4768 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4769 {
4770 /* register target */
4771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4772 IEM_MC_BEGIN(0, 0);
4773 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4774 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4775 } IEM_MC_ELSE() {
4776 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4777 } IEM_MC_ENDIF();
4778 IEM_MC_ADVANCE_RIP();
4779 IEM_MC_END();
4780 }
4781 else
4782 {
4783 /* memory target */
4784 IEM_MC_BEGIN(0, 1);
4785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4788 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4789 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4790 } IEM_MC_ELSE() {
4791 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4792 } IEM_MC_ENDIF();
4793 IEM_MC_ADVANCE_RIP();
4794 IEM_MC_END();
4795 }
4796 return VINF_SUCCESS;
4797}
4798
4799
4800/** Opcode 0x0f 0x98. */
4801FNIEMOP_DEF(iemOp_sets_Eb)
4802{
4803 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4804 IEMOP_HLP_MIN_386();
4805 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4806
4807 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4808 * any way. AMD says it's "unused", whatever that means. We're
4809 * ignoring it for now. */
4810 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4811 {
4812 /* register target */
4813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4814 IEM_MC_BEGIN(0, 0);
4815 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4816 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4817 } IEM_MC_ELSE() {
4818 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4819 } IEM_MC_ENDIF();
4820 IEM_MC_ADVANCE_RIP();
4821 IEM_MC_END();
4822 }
4823 else
4824 {
4825 /* memory target */
4826 IEM_MC_BEGIN(0, 1);
4827 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4830 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4831 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4832 } IEM_MC_ELSE() {
4833 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4834 } IEM_MC_ENDIF();
4835 IEM_MC_ADVANCE_RIP();
4836 IEM_MC_END();
4837 }
4838 return VINF_SUCCESS;
4839}
4840
4841
4842/** Opcode 0x0f 0x99. */
4843FNIEMOP_DEF(iemOp_setns_Eb)
4844{
4845 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4846 IEMOP_HLP_MIN_386();
4847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4848
4849 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4850 * any way. AMD says it's "unused", whatever that means. We're
4851 * ignoring it for now. */
4852 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4853 {
4854 /* register target */
4855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4856 IEM_MC_BEGIN(0, 0);
4857 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4858 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4859 } IEM_MC_ELSE() {
4860 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4861 } IEM_MC_ENDIF();
4862 IEM_MC_ADVANCE_RIP();
4863 IEM_MC_END();
4864 }
4865 else
4866 {
4867 /* memory target */
4868 IEM_MC_BEGIN(0, 1);
4869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4872 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4873 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4874 } IEM_MC_ELSE() {
4875 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4876 } IEM_MC_ENDIF();
4877 IEM_MC_ADVANCE_RIP();
4878 IEM_MC_END();
4879 }
4880 return VINF_SUCCESS;
4881}
4882
4883
4884/** Opcode 0x0f 0x9a. */
4885FNIEMOP_DEF(iemOp_setp_Eb)
4886{
4887 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4888 IEMOP_HLP_MIN_386();
4889 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4890
4891 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4892 * any way. AMD says it's "unused", whatever that means. We're
4893 * ignoring it for now. */
4894 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4895 {
4896 /* register target */
4897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4898 IEM_MC_BEGIN(0, 0);
4899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4900 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4901 } IEM_MC_ELSE() {
4902 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4903 } IEM_MC_ENDIF();
4904 IEM_MC_ADVANCE_RIP();
4905 IEM_MC_END();
4906 }
4907 else
4908 {
4909 /* memory target */
4910 IEM_MC_BEGIN(0, 1);
4911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4914 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4915 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4916 } IEM_MC_ELSE() {
4917 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4918 } IEM_MC_ENDIF();
4919 IEM_MC_ADVANCE_RIP();
4920 IEM_MC_END();
4921 }
4922 return VINF_SUCCESS;
4923}
4924
4925
4926/** Opcode 0x0f 0x9b. */
4927FNIEMOP_DEF(iemOp_setnp_Eb)
4928{
4929 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4930 IEMOP_HLP_MIN_386();
4931 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4932
4933 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4934 * any way. AMD says it's "unused", whatever that means. We're
4935 * ignoring it for now. */
4936 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4937 {
4938 /* register target */
4939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4940 IEM_MC_BEGIN(0, 0);
4941 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4942 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4943 } IEM_MC_ELSE() {
4944 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4945 } IEM_MC_ENDIF();
4946 IEM_MC_ADVANCE_RIP();
4947 IEM_MC_END();
4948 }
4949 else
4950 {
4951 /* memory target */
4952 IEM_MC_BEGIN(0, 1);
4953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4954 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4956 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4957 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4958 } IEM_MC_ELSE() {
4959 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4960 } IEM_MC_ENDIF();
4961 IEM_MC_ADVANCE_RIP();
4962 IEM_MC_END();
4963 }
4964 return VINF_SUCCESS;
4965}
4966
4967
4968/** Opcode 0x0f 0x9c. */
4969FNIEMOP_DEF(iemOp_setl_Eb)
4970{
4971 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4972 IEMOP_HLP_MIN_386();
4973 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4974
4975 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4976 * any way. AMD says it's "unused", whatever that means. We're
4977 * ignoring it for now. */
4978 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4979 {
4980 /* register target */
4981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4982 IEM_MC_BEGIN(0, 0);
4983 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4984 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4985 } IEM_MC_ELSE() {
4986 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4987 } IEM_MC_ENDIF();
4988 IEM_MC_ADVANCE_RIP();
4989 IEM_MC_END();
4990 }
4991 else
4992 {
4993 /* memory target */
4994 IEM_MC_BEGIN(0, 1);
4995 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4996 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4998 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4999 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5000 } IEM_MC_ELSE() {
5001 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5002 } IEM_MC_ENDIF();
5003 IEM_MC_ADVANCE_RIP();
5004 IEM_MC_END();
5005 }
5006 return VINF_SUCCESS;
5007}
5008
5009
5010/** Opcode 0x0f 0x9d. */
5011FNIEMOP_DEF(iemOp_setnl_Eb)
5012{
5013 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
5014 IEMOP_HLP_MIN_386();
5015 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5016
5017 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5018 * any way. AMD says it's "unused", whatever that means. We're
5019 * ignoring it for now. */
5020 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5021 {
5022 /* register target */
5023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5024 IEM_MC_BEGIN(0, 0);
5025 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5026 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5027 } IEM_MC_ELSE() {
5028 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5029 } IEM_MC_ENDIF();
5030 IEM_MC_ADVANCE_RIP();
5031 IEM_MC_END();
5032 }
5033 else
5034 {
5035 /* memory target */
5036 IEM_MC_BEGIN(0, 1);
5037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5040 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
5041 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5042 } IEM_MC_ELSE() {
5043 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5044 } IEM_MC_ENDIF();
5045 IEM_MC_ADVANCE_RIP();
5046 IEM_MC_END();
5047 }
5048 return VINF_SUCCESS;
5049}
5050
5051
5052/** Opcode 0x0f 0x9e. */
5053FNIEMOP_DEF(iemOp_setle_Eb)
5054{
5055 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
5056 IEMOP_HLP_MIN_386();
5057 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5058
5059 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5060 * any way. AMD says it's "unused", whatever that means. We're
5061 * ignoring it for now. */
5062 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5063 {
5064 /* register target */
5065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5066 IEM_MC_BEGIN(0, 0);
5067 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5068 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5069 } IEM_MC_ELSE() {
5070 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5071 } IEM_MC_ENDIF();
5072 IEM_MC_ADVANCE_RIP();
5073 IEM_MC_END();
5074 }
5075 else
5076 {
5077 /* memory target */
5078 IEM_MC_BEGIN(0, 1);
5079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5082 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5083 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5084 } IEM_MC_ELSE() {
5085 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5086 } IEM_MC_ENDIF();
5087 IEM_MC_ADVANCE_RIP();
5088 IEM_MC_END();
5089 }
5090 return VINF_SUCCESS;
5091}
5092
5093
5094/** Opcode 0x0f 0x9f. */
5095FNIEMOP_DEF(iemOp_setnle_Eb)
5096{
5097 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
5098 IEMOP_HLP_MIN_386();
5099 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5100
5101 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
5102 * any way. AMD says it's "unused", whatever that means. We're
5103 * ignoring it for now. */
5104 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5105 {
5106 /* register target */
5107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5108 IEM_MC_BEGIN(0, 0);
5109 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5110 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
5111 } IEM_MC_ELSE() {
5112 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
5113 } IEM_MC_ENDIF();
5114 IEM_MC_ADVANCE_RIP();
5115 IEM_MC_END();
5116 }
5117 else
5118 {
5119 /* memory target */
5120 IEM_MC_BEGIN(0, 1);
5121 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5124 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
5125 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5126 } IEM_MC_ELSE() {
5127 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
5128 } IEM_MC_ENDIF();
5129 IEM_MC_ADVANCE_RIP();
5130 IEM_MC_END();
5131 }
5132 return VINF_SUCCESS;
5133}
5134
5135
5136/**
5137 * Common 'push segment-register' helper.
5138 */
5139FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5140{
5141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5142 if (iReg < X86_SREG_FS)
5143 IEMOP_HLP_NO_64BIT();
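    /* push es/cs/ss/ds (opcodes 06h/0eh/16h/1eh) are invalid in 64-bit mode;
       only the 0f a0h / 0f a8h encodings for fs and gs remain pushable. */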
5144 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5145
5146 switch (pVCpu->iem.s.enmEffOpSize)
5147 {
5148 case IEMMODE_16BIT:
5149 IEM_MC_BEGIN(0, 1);
5150 IEM_MC_LOCAL(uint16_t, u16Value);
5151 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5152 IEM_MC_PUSH_U16(u16Value);
5153 IEM_MC_ADVANCE_RIP();
5154 IEM_MC_END();
5155 break;
5156
5157 case IEMMODE_32BIT:
5158 IEM_MC_BEGIN(0, 1);
5159 IEM_MC_LOCAL(uint32_t, u32Value);
5160 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
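            /* A dedicated push op is used for this case: a 32-bit push of a
               segment register may, on some CPUs, write only the low 16 bits
               of the stack slot and leave the upper half untouched. */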
5161 IEM_MC_PUSH_U32_SREG(u32Value);
5162 IEM_MC_ADVANCE_RIP();
5163 IEM_MC_END();
5164 break;
5165
5166 case IEMMODE_64BIT:
5167 IEM_MC_BEGIN(0, 1);
5168 IEM_MC_LOCAL(uint64_t, u64Value);
5169 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5170 IEM_MC_PUSH_U64(u64Value);
5171 IEM_MC_ADVANCE_RIP();
5172 IEM_MC_END();
5173 break;
5174 }
5175
5176 return VINF_SUCCESS;
5177}
5178
5179
5180/** Opcode 0x0f 0xa0. */
5181FNIEMOP_DEF(iemOp_push_fs)
5182{
5183 IEMOP_MNEMONIC(push_fs, "push fs");
5184 IEMOP_HLP_MIN_386();
5185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5186 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
5187}
5188
5189
5190/** Opcode 0x0f 0xa1. */
5191FNIEMOP_DEF(iemOp_pop_fs)
5192{
5193 IEMOP_MNEMONIC(pop_fs, "pop fs");
5194 IEMOP_HLP_MIN_386();
5195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5196 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
5197}
5198
5199
5200/** Opcode 0x0f 0xa2. */
5201FNIEMOP_DEF(iemOp_cpuid)
5202{
5203 IEMOP_MNEMONIC(cpuid, "cpuid");
5204 IEMOP_HLP_MIN_486(); /* Not on all 486 models. */
5205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5206 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
5207}
5208
5209
5210/**
5211 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5212 * iemOp_bts_Ev_Gv.
5213 */
5214FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5215{
5216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5217 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
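    /* bt/bts/btr/btc architecturally define only CF; the remaining status
       flags are treated as undefined here, hence the verification hint above. */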
5218
5219 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5220 {
5221 /* register destination. */
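        /* For a register destination the bit offset simply wraps modulo the
           operand width (16, 32 or 64). */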
5222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5223 switch (pVCpu->iem.s.enmEffOpSize)
5224 {
5225 case IEMMODE_16BIT:
5226 IEM_MC_BEGIN(3, 0);
5227 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5228 IEM_MC_ARG(uint16_t, u16Src, 1);
5229 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5230
5231 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5232 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5233 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5234 IEM_MC_REF_EFLAGS(pEFlags);
5235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5236
5237 IEM_MC_ADVANCE_RIP();
5238 IEM_MC_END();
5239 return VINF_SUCCESS;
5240
5241 case IEMMODE_32BIT:
5242 IEM_MC_BEGIN(3, 0);
5243 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5244 IEM_MC_ARG(uint32_t, u32Src, 1);
5245 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5246
5247 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5248 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5249 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5250 IEM_MC_REF_EFLAGS(pEFlags);
5251 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5252
5253 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5254 IEM_MC_ADVANCE_RIP();
5255 IEM_MC_END();
5256 return VINF_SUCCESS;
5257
5258 case IEMMODE_64BIT:
5259 IEM_MC_BEGIN(3, 0);
5260 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5261 IEM_MC_ARG(uint64_t, u64Src, 1);
5262 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5263
5264 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5265 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5266 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5267 IEM_MC_REF_EFLAGS(pEFlags);
5268 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5269
5270 IEM_MC_ADVANCE_RIP();
5271 IEM_MC_END();
5272 return VINF_SUCCESS;
5273
5274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5275 }
5276 }
5277 else
5278 {
5279 /* memory destination. */
5280
5281 uint32_t fAccess;
5282 if (pImpl->pfnLockedU16)
5283 fAccess = IEM_ACCESS_DATA_RW;
5284 else /* BT */
5285 fAccess = IEM_ACCESS_DATA_R;
5286
5287 /** @todo test negative bit offsets! */
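        /* The bit offset in Gv is signed: the effective address is adjusted
           by (offset >> log2(width)) elements using an arithmetic shift, so
           negative offsets reach below GCPtrEffDst, and the low bits that
           remain select the bit. E.g. for the 16-bit form an offset of 100
           addresses the word at +12 bytes, bit 4, while an offset of -1
           addresses the word at -2 bytes, bit 15. */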
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int16_t, i16AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i16AddrAdj, u16Src);
                IEM_MC_AND_ARG_U16(u16Src, 0x0f);
                IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
                IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
                IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, i32AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i32AddrAdj, u32Src);
                IEM_MC_AND_ARG_U32(u32Src, 0x1f);
                IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
                IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
                IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int64_t, i64AddrAdj);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ASSIGN(i64AddrAdj, u64Src);
                IEM_MC_AND_ARG_U64(u64Src, 0x3f);
                IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
                IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
                IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
                IEM_MC_FETCH_EFLAGS(EFlags);

                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xa3. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

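                /* The third IEM_MC_CALC_RM_EFF_ADDR argument is 1 here since
                   the shift count immediate byte still follows the ModR/M
                   operand (likewise for the 32-bit and 64-bit cases below). */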
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}



/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa5. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}


/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}


/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_rsm);
//IEMOP_HLP_MIN_386();


/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}


/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}


/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);


/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(lfence, "lfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

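    /* When the host CPU supports SSE2 we execute a real LFENCE, otherwise we
       fall back to an alternative memory fence with comparable ordering
       semantics (the MFENCE and SFENCE workers below do the same). */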
    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(mfence, "mfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(sfence, "sfence");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);


/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
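        /* Memory operand forms: the instruction is picked by the /r field. */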
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
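        /* Register operand forms: dispatched on the mandatory prefix first,
           then on the /r field. */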
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}


/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}


/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
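        /* The AL copy is written back unconditionally; the helper only
           changes it on a failed compare, so this matches the architected
           result. */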
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
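                /* On 32-bit (x86) hosts the 64-bit source operand is handed
                   to the assembly helper by reference rather than by value. */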
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

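    /* The Mp operand is laid out with the offset first and the 16-bit
       selector stored right after it. */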
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x0f 0xb2. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
}


/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}


/** Opcode 0x0f 0xb4. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
}


/** Opcode 0x0f 0xb5. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
}


/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);


/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0xba. */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
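        /* Unlike the Gv forms, the Ib bit offset is simply masked to the
           operand width, so no effective address adjustment is needed. */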
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}


/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}


/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}


/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);


/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}


/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);


/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

6924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6925 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6926 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6927 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6928 IEM_MC_FETCH_EFLAGS(EFlags);
6929 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6930 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6931 else
6932 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6933
6934 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6935 IEM_MC_COMMIT_EFLAGS(EFlags);
6936 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6937 IEM_MC_ADVANCE_RIP();
6938 IEM_MC_END();
6940 }
6941 return VINF_SUCCESS;
6942}
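/* For reference, a hedged plain-C sketch of the exchange-and-add that the
   iemAImpl_xadd_u8 worker performs (illustration only; the real worker also
   updates the arithmetic flags, and the helper name below is invented): */
#if 0
static void iemExampleXAddU8(uint8_t *pu8Dst, uint8_t *pu8Reg)
{
    uint8_t const u8OldDst = *pu8Dst;   /* remember the destination */
    *pu8Dst = u8OldDst + *pu8Reg;       /* dst += reg */
    *pu8Reg = u8OldDst;                 /* reg = old dst */
}
#endif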
6943
6944
6945/** Opcode 0x0f 0xc1. */
6946FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6947{
6948 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6949 IEMOP_HLP_MIN_486();
6950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6951
6952 /*
6953 * If rm is denoting a register, no more instruction bytes.
6954 */
6955 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6956 {
6957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6958
6959 switch (pVCpu->iem.s.enmEffOpSize)
6960 {
6961 case IEMMODE_16BIT:
6962 IEM_MC_BEGIN(3, 0);
6963 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6964 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6965 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6966
6967 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6968 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6969 IEM_MC_REF_EFLAGS(pEFlags);
6970 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6971
6972 IEM_MC_ADVANCE_RIP();
6973 IEM_MC_END();
6974 return VINF_SUCCESS;
6975
6976 case IEMMODE_32BIT:
6977 IEM_MC_BEGIN(3, 0);
6978 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6979 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6980 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6981
6982 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6983 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6984 IEM_MC_REF_EFLAGS(pEFlags);
6985 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6986
6987 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6988 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6989 IEM_MC_ADVANCE_RIP();
6990 IEM_MC_END();
6991 return VINF_SUCCESS;
6992
6993 case IEMMODE_64BIT:
6994 IEM_MC_BEGIN(3, 0);
6995 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6996 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6997 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6998
6999 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7000 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7001 IEM_MC_REF_EFLAGS(pEFlags);
7002 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7003
7004 IEM_MC_ADVANCE_RIP();
7005 IEM_MC_END();
7006 return VINF_SUCCESS;
7007
7008 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7009 }
7010 }
7011 else
7012 {
7013 /*
7014 * We're accessing memory.
7015 */
7016 switch (pVCpu->iem.s.enmEffOpSize)
7017 {
7018 case IEMMODE_16BIT:
7019 IEM_MC_BEGIN(3, 3);
7020 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7021 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
7022 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7023 IEM_MC_LOCAL(uint16_t, u16RegCopy);
7024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7025
7026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7027 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7028 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7029 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
7030 IEM_MC_FETCH_EFLAGS(EFlags);
7031 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7032 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
7033 else
7034 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
7035
7036 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7037 IEM_MC_COMMIT_EFLAGS(EFlags);
7038 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
7039 IEM_MC_ADVANCE_RIP();
7040 IEM_MC_END();
7041 return VINF_SUCCESS;
7042
7043 case IEMMODE_32BIT:
7044 IEM_MC_BEGIN(3, 3);
7045 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7046 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
7047 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7048 IEM_MC_LOCAL(uint32_t, u32RegCopy);
7049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7050
7051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7052 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7053 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7054 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
7055 IEM_MC_FETCH_EFLAGS(EFlags);
7056 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7057 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
7058 else
7059 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
7060
7061 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7062 IEM_MC_COMMIT_EFLAGS(EFlags);
7063 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
7064 IEM_MC_ADVANCE_RIP();
7065 IEM_MC_END();
7066 return VINF_SUCCESS;
7067
7068 case IEMMODE_64BIT:
7069 IEM_MC_BEGIN(3, 3);
7070 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7071 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
7072 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7073 IEM_MC_LOCAL(uint64_t, u64RegCopy);
7074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7075
7076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7077 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7078 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7079 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
7080 IEM_MC_FETCH_EFLAGS(EFlags);
7081 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7082 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
7083 else
7084 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
7085
7086 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7087 IEM_MC_COMMIT_EFLAGS(EFlags);
7088 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
7089 IEM_MC_ADVANCE_RIP();
7090 IEM_MC_END();
7091 return VINF_SUCCESS;
7092
7093 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7094 }
7095 }
7096}
7097
7098
7099/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
7100FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
7101/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
7102FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
7103/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
7104FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
7105/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
7106FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7107
7108
7109/** Opcode 0x0f 0xc3. */
7110FNIEMOP_DEF(iemOp_movnti_My_Gy)
7111{
7112 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
7113
7114 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7115
7116 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
7117 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7118 {
7119 switch (pVCpu->iem.s.enmEffOpSize)
7120 {
7121 case IEMMODE_32BIT:
7122 IEM_MC_BEGIN(0, 2);
7123 IEM_MC_LOCAL(uint32_t, u32Value);
7124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7125
7126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7128 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7129 return IEMOP_RAISE_INVALID_OPCODE();
7130
7131 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7132 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
7133 IEM_MC_ADVANCE_RIP();
7134 IEM_MC_END();
7135 break;
7136
7137 case IEMMODE_64BIT:
7138 IEM_MC_BEGIN(0, 2);
7139 IEM_MC_LOCAL(uint64_t, u64Value);
7140 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7141
7142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7144 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
7145 return IEMOP_RAISE_INVALID_OPCODE();
7146
7147 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7148 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
7149 IEM_MC_ADVANCE_RIP();
7150 IEM_MC_END();
7151 break;
7152
7153 case IEMMODE_16BIT:
7154 /** @todo check this form. */
7155 return IEMOP_RAISE_INVALID_OPCODE();
7156 }
7157 }
7158 else
7159 return IEMOP_RAISE_INVALID_OPCODE();
7160 return VINF_SUCCESS;
7161}
7162/* Opcode 0x66 0x0f 0xc3 - invalid */
7163/* Opcode 0xf3 0x0f 0xc3 - invalid */
7164/* Opcode 0xf2 0x0f 0xc3 - invalid */
7165
7166/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
7167FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
7168/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
7169FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
7170/* Opcode 0xf3 0x0f 0xc4 - invalid */
7171/* Opcode 0xf2 0x0f 0xc4 - invalid */
7172
7173/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
7174FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
7175/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
7176FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
7177/* Opcode 0xf3 0x0f 0xc5 - invalid */
7178/* Opcode 0xf2 0x0f 0xc5 - invalid */
7179
7180/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
7181FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
7182/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
7183FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
7184/* Opcode 0xf3 0x0f 0xc6 - invalid */
7185/* Opcode 0xf2 0x0f 0xc6 - invalid */
7186
7187
7188/** Opcode 0x0f 0xc7 !11/1. */
7189FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
7190{
7191 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
7192
7193 IEM_MC_BEGIN(4, 3);
7194 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
7195 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
7196 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
7197 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7198 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
7199 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
7200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7201
7202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7203 IEMOP_HLP_DONE_DECODING();
7204 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7205
7206 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
7207 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
7208 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
7209
7210 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
7211 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
7212 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
7213
7214 IEM_MC_FETCH_EFLAGS(EFlags);
7215 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7216 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7217 else
7218 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
7219
7220 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
7221 IEM_MC_COMMIT_EFLAGS(EFlags);
7222 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7223 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
7224 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
7225 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
7226 IEM_MC_ENDIF();
7227 IEM_MC_ADVANCE_RIP();
7228
7229 IEM_MC_END();
7230 return VINF_SUCCESS;
7231}
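/* A hedged plain-C sketch of the compare-and-exchange that iemAImpl_cmpxchg8b
   implements (illustration only; the locked variant is atomic and ZF is
   delivered through the EFLAGS pointer in the real worker): */
#if 0
static void iemExampleCmpXchg8b(uint64_t *pu64Mem, PRTUINT64U pu64EaxEdx, PCRTUINT64U pu64EbxEcx, bool *pfZF)
{
    if (*pu64Mem == pu64EaxEdx->u)
    {
        *pu64Mem = pu64EbxEcx->u;       /* equal: store ECX:EBX to memory, set ZF */
        *pfZF = true;
    }
    else
    {
        pu64EaxEdx->u = *pu64Mem;       /* not equal: load EDX:EAX from memory, clear ZF */
        *pfZF = false;
    }
}
#endif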
7232
7233
7234/** Opcode REX.W 0x0f 0xc7 !11/1. */
7235FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
7236{
7237 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
7238 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7239 {
7240#if 0
7241 RT_NOREF(bRm);
7242 IEMOP_BITCH_ABOUT_STUB();
7243 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7244#else
7245 IEM_MC_BEGIN(4, 3);
7246 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
7247 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
7248 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
7249 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
7250 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
7251 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
7252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7253
7254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7255 IEMOP_HLP_DONE_DECODING();
7256 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
7257 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7258
7259 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
7260 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
7261 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
7262
7263 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
7264 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
7265 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
7266
7267 IEM_MC_FETCH_EFLAGS(EFlags);
7268# ifdef RT_ARCH_AMD64
7269 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
7270 {
7271 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
7272 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7273 else
7274 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7275 }
7276 else
7277# endif
7278 {
7279            /* Note! The fallback for 32-bit systems and systems without CX16 is
7280               multiple accesses that are not at all atomic, which works fine in a
7281               uni-CPU guest configuration (ignoring DMA).  If guest SMP is active
7282               we have no choice but to use a rendezvous callback here.  Sigh. */
7283 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
7284 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7285 else
7286 {
7287 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
7288 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
7289 }
7290 }
7291
7292 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
7293 IEM_MC_COMMIT_EFLAGS(EFlags);
7294 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
7295 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
7296 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
7297 IEM_MC_ENDIF();
7298 IEM_MC_ADVANCE_RIP();
7299
7300 IEM_MC_END();
7301 return VINF_SUCCESS;
7302#endif
7303 }
7304 Log(("cmpxchg16b -> #UD\n"));
7305 return IEMOP_RAISE_INVALID_OPCODE();
7306}
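/* Why the rendezvous above is unavoidable, as a hedged sketch: the plain-C
   fallback does the compare and the store as separate steps, roughly as
   below, so a concurrently running vCPU could modify the memory in between.
   (Invented helper name; EFLAGS handling omitted.) */
#if 0
static void iemExampleCmpXchg16bTorn(PRTUINT128U pu128Mem, PRTUINT128U pu128RaxRdx, PCRTUINT128U pu128RbxRcx, bool *pfZF)
{
    bool const fEqual = pu128Mem->s.Lo == pu128RaxRdx->s.Lo
                     && pu128Mem->s.Hi == pu128RaxRdx->s.Hi; /* step 1: compare */
    /* <-- window: another vCPU may write *pu128Mem here --> */
    if (fEqual)
        *pu128Mem = *pu128RbxRcx;                            /* step 2: store RCX:RBX */
    else
        *pu128RaxRdx = *pu128Mem;                            /* step 2: load into RDX:RAX */
    *pfZF = fEqual;
}
#endif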
7307
7308
7309/** Opcode 0x0f 0xc7 11/6. */
7310FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
7311
7312/** Opcode 0x0f 0xc7 !11/6. */
7313FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
7314
7315/** Opcode 0x66 0x0f 0xc7 !11/6. */
7316FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
7317
7318/** Opcode 0xf3 0x0f 0xc7 !11/6. */
7319FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
7320
7321/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
7322FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7323
7324
7325/** Opcode 0x0f 0xc7. */
7326FNIEMOP_DEF(iemOp_Grp9)
7327{
7328 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
7329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7330 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7331 {
7332 case 0: case 2: case 3: case 4: case 5:
7333 return IEMOP_RAISE_INVALID_OPCODE();
7334 case 1:
7335 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
7336 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
7337 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
7338 return IEMOP_RAISE_INVALID_OPCODE();
7339 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7340 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
7341 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
7342 case 6:
7343 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7344 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
7345 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7346 {
7347 case 0:
7348 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
7349 case IEM_OP_PRF_SIZE_OP:
7350 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
7351 case IEM_OP_PRF_REPZ:
7352 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
7353 default:
7354 return IEMOP_RAISE_INVALID_OPCODE();
7355 }
7356 case 7:
7357 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
7358 {
7359 case 0:
7360 case IEM_OP_PRF_REPZ:
7361 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
7362 default:
7363 return IEMOP_RAISE_INVALID_OPCODE();
7364 }
7365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7366 }
7367}
7368
7369
7370/**
7371 * Common 'bswap register' helper.
7372 */
7373FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7374{
7375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7376 switch (pVCpu->iem.s.enmEffOpSize)
7377 {
7378 case IEMMODE_16BIT:
7379 IEM_MC_BEGIN(1, 0);
7380 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7381 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7382 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7383 IEM_MC_ADVANCE_RIP();
7384 IEM_MC_END();
7385 return VINF_SUCCESS;
7386
7387 case IEMMODE_32BIT:
7388 IEM_MC_BEGIN(1, 0);
7389 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7390 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7391 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7392 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7393 IEM_MC_ADVANCE_RIP();
7394 IEM_MC_END();
7395 return VINF_SUCCESS;
7396
7397 case IEMMODE_64BIT:
7398 IEM_MC_BEGIN(1, 0);
7399 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7400 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7401 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7402 IEM_MC_ADVANCE_RIP();
7403 IEM_MC_END();
7404 return VINF_SUCCESS;
7405
7406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7407 }
7408}
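/* Hedged sketch of what the 32-bit bswap worker boils down to (illustration
   only; invented helper name).  The 16-bit form is architecturally undefined,
   which is why the 16-bit case above hands a full 32-bit register reference
   to iemAImpl_bswap_u16 rather than swapping a word. */
#if 0
static uint32_t iemExampleBSwapU32(uint32_t u32)
{
    return (u32 >> 24)
         | ((u32 >>  8) & UINT32_C(0x0000ff00))
         | ((u32 <<  8) & UINT32_C(0x00ff0000))
         |  (u32 << 24);
}
#endif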
7409
7410
7411/** Opcode 0x0f 0xc8. */
7412FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7413{
7414 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7415    /* Note! The Intel manuals state that R8-R15 can be accessed by using a
7416       REX.X prefix, but REX.B appears to be the correct prefix.  For a
7417       parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7418 IEMOP_HLP_MIN_486();
7419 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7420}
7421
7422
7423/** Opcode 0x0f 0xc9. */
7424FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7425{
7426 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7427 IEMOP_HLP_MIN_486();
7428 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7429}
7430
7431
7432/** Opcode 0x0f 0xca. */
7433FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7434{
7435    IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7436 IEMOP_HLP_MIN_486();
7437 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7438}
7439
7440
7441/** Opcode 0x0f 0xcb. */
7442FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7443{
7444    IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7445 IEMOP_HLP_MIN_486();
7446 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7447}
7448
7449
7450/** Opcode 0x0f 0xcc. */
7451FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7452{
7453 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7454 IEMOP_HLP_MIN_486();
7455 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7456}
7457
7458
7459/** Opcode 0x0f 0xcd. */
7460FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7461{
7462 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7463 IEMOP_HLP_MIN_486();
7464 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7465}
7466
7467
7468/** Opcode 0x0f 0xce. */
7469FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7470{
7471 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7472 IEMOP_HLP_MIN_486();
7473 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7474}
7475
7476
7477/** Opcode 0x0f 0xcf. */
7478FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7479{
7480 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7481 IEMOP_HLP_MIN_486();
7482 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7483}
7484
7485
7486/* Opcode 0x0f 0xd0 - invalid */
7487/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
7488FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
7489/* Opcode 0xf3 0x0f 0xd0 - invalid */
7490/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
7491FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
7492
7493/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
7494FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
7495/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
7496FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
7497/* Opcode 0xf3 0x0f 0xd1 - invalid */
7498/* Opcode 0xf2 0x0f 0xd1 - invalid */
7499
7500/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
7501FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
7502/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
7503FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
7504/* Opcode 0xf3 0x0f 0xd2 - invalid */
7505/* Opcode 0xf2 0x0f 0xd2 - invalid */
7506
7507/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
7508FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
7509/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
7510FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
7511/* Opcode 0xf3 0x0f 0xd3 - invalid */
7512/* Opcode 0xf2 0x0f 0xd3 - invalid */
7513
7514/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
7515FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
7516/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
7517FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
7518/* Opcode 0xf3 0x0f 0xd4 - invalid */
7519/* Opcode 0xf2 0x0f 0xd4 - invalid */
7520
7521/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
7522FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
7523/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
7524FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
7525/* Opcode 0xf3 0x0f 0xd5 - invalid */
7526/* Opcode 0xf2 0x0f 0xd5 - invalid */
7527
7528/* Opcode 0x0f 0xd6 - invalid */
7529/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
7530FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
7531/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
7532FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
7533/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
7534FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
7535#if 0
7536FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
7537{
7538    /* Docs say register only. */
7539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7540
7541 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7542 {
7543 case IEM_OP_PRF_SIZE_OP: /* SSE */
7544 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
7545 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7546 IEM_MC_BEGIN(2, 0);
7547 IEM_MC_ARG(uint64_t *, pDst, 0);
7548 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7549 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7550 IEM_MC_PREPARE_SSE_USAGE();
7551 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7552 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7553 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7554 IEM_MC_ADVANCE_RIP();
7555 IEM_MC_END();
7556 return VINF_SUCCESS;
7557
7558 case 0: /* MMX */
7559            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7560 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7561 IEM_MC_BEGIN(2, 0);
7562 IEM_MC_ARG(uint64_t *, pDst, 0);
7563 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7564 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7565 IEM_MC_PREPARE_FPU_USAGE();
7566 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7567 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7568 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7569 IEM_MC_ADVANCE_RIP();
7570 IEM_MC_END();
7571 return VINF_SUCCESS;
7572
7573 default:
7574 return IEMOP_RAISE_INVALID_OPCODE();
7575 }
7576}
7577#endif
7578
7579
7580/** Opcode 0x0f 0xd7. */
7581FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7582{
7583    /* Docs say register only. */
7584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7585 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7586 return IEMOP_RAISE_INVALID_OPCODE();
7587
7588    /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7589    /** @todo testcase: Check that the instruction implicitly clears the high
7590     *        bits in 64-bit mode.  REX.W only becomes necessary once VLMAX > 256
7591     *        and opcode modifications are made to work with the whole width (not
7592     *        just 128). */
7593 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7594 {
7595 case IEM_OP_PRF_SIZE_OP: /* SSE */
7596            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7597 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7598 IEM_MC_BEGIN(2, 0);
7599 IEM_MC_ARG(uint64_t *, pDst, 0);
7600 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7601 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7602 IEM_MC_PREPARE_SSE_USAGE();
7603 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7604 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7605 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7606 IEM_MC_ADVANCE_RIP();
7607 IEM_MC_END();
7608 return VINF_SUCCESS;
7609
7610 case 0: /* MMX */
7611            IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7612 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7613 IEM_MC_BEGIN(2, 0);
7614 IEM_MC_ARG(uint64_t *, pDst, 0);
7615 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7616 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7617 IEM_MC_PREPARE_FPU_USAGE();
7618 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7619 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7620 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7621 IEM_MC_ADVANCE_RIP();
7622 IEM_MC_END();
7623 return VINF_SUCCESS;
7624
7625 default:
7626 return IEMOP_RAISE_INVALID_OPCODE();
7627 }
7628}
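/* Hedged sketch of the pmovmskb operation the workers above implement:
   collect the most significant bit of each source byte into the low bits of
   the destination (64-bit/MMX width shown; invented helper name). */
#if 0
static uint64_t iemExamplePMovMskBU64(uint64_t uSrc)
{
    uint64_t uDst = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        uDst |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return uDst;
}
#endif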
7629
7630
7631/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
7632FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
7633/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
7634FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
7635/* Opcode 0xf3 0x0f 0xd8 - invalid */
7636/* Opcode 0xf2 0x0f 0xd8 - invalid */
7637
7638/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
7639FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
7640/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
7641FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
7642/* Opcode 0xf3 0x0f 0xd9 - invalid */
7643/* Opcode 0xf2 0x0f 0xd9 - invalid */
7644
7645/** Opcode 0x0f 0xda - pminub Pq, Qq */
7646FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
7647/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
7648FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
7649/* Opcode 0xf3 0x0f 0xda - invalid */
7650/* Opcode 0xf2 0x0f 0xda - invalid */
7651
7652/** Opcode 0x0f 0xdb - pand Pq, Qq */
7653FNIEMOP_STUB(iemOp_pand_Pq_Qq);
7654/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
7655FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
7656/* Opcode 0xf3 0x0f 0xdb - invalid */
7657/* Opcode 0xf2 0x0f 0xdb - invalid */
7658
7659/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
7660FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
7661/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
7662FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
7663/* Opcode 0xf3 0x0f 0xdc - invalid */
7664/* Opcode 0xf2 0x0f 0xdc - invalid */
7665
7666/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
7667FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
7668/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
7669FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
7670/* Opcode 0xf3 0x0f 0xdd - invalid */
7671/* Opcode 0xf2 0x0f 0xdd - invalid */
7672
7673/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
7674FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
7675/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
7676FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
7677/* Opcode 0xf3 0x0f 0xde - invalid */
7678/* Opcode 0xf2 0x0f 0xde - invalid */
7679
7680/** Opcode 0x0f 0xdf - pandn Pq, Qq */
7681FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
7682/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
7683FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
7684/* Opcode 0xf3 0x0f 0xdf - invalid */
7685/* Opcode 0xf2 0x0f 0xdf - invalid */
7686
7687/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
7688FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
7689/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
7690FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
7691/* Opcode 0xf3 0x0f 0xe0 - invalid */
7692/* Opcode 0xf2 0x0f 0xe0 - invalid */
7693
7694/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
7695FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
7696/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
7697FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
7698/* Opcode 0xf3 0x0f 0xe1 - invalid */
7699/* Opcode 0xf2 0x0f 0xe1 - invalid */
7700
7701/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
7702FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
7703/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
7704FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
7705/* Opcode 0xf3 0x0f 0xe2 - invalid */
7706/* Opcode 0xf2 0x0f 0xe2 - invalid */
7707
7708/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
7709FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
7710/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
7711FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
7712/* Opcode 0xf3 0x0f 0xe3 - invalid */
7713/* Opcode 0xf2 0x0f 0xe3 - invalid */
7714
7715/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
7716FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
7717/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
7718FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
7719/* Opcode 0xf3 0x0f 0xe4 - invalid */
7720/* Opcode 0xf2 0x0f 0xe4 - invalid */
7721
7722/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
7723FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
7724/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
7725FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
7726/* Opcode 0xf3 0x0f 0xe5 - invalid */
7727/* Opcode 0xf2 0x0f 0xe5 - invalid */
7728
7729/* Opcode 0x0f 0xe6 - invalid */
7730/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
7731FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
7732/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
7733FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
7734/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
7735FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7736
7737
7738/** Opcode 0x0f 0xe7. */
7739FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7740{
7741 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7742 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7743 {
7744 /*
7745 * Register, memory.
7746 */
7747/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7748 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7749 {
7750
7751 case IEM_OP_PRF_SIZE_OP: /* SSE */
7752                IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7753 IEM_MC_BEGIN(0, 2);
7754 IEM_MC_LOCAL(uint128_t, uSrc);
7755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7756
7757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7759 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7760 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7761
7762 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7763 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7764
7765 IEM_MC_ADVANCE_RIP();
7766 IEM_MC_END();
7767 break;
7768
7769 case 0: /* MMX */
7770                IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7771 IEM_MC_BEGIN(0, 2);
7772 IEM_MC_LOCAL(uint64_t, uSrc);
7773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7774
7775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7777 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7778 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7779
7780 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7781 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7782
7783 IEM_MC_ADVANCE_RIP();
7784 IEM_MC_END();
7785 break;
7786
7787 default:
7788 return IEMOP_RAISE_INVALID_OPCODE();
7789 }
7790 }
7791 /* The register, register encoding is invalid. */
7792 else
7793 return IEMOP_RAISE_INVALID_OPCODE();
7794 return VINF_SUCCESS;
7795}
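/* A hedged note on the two store forms above: the SSE movntdq path uses
   IEM_MC_STORE_MEM_U128_ALIGN_SSE because the destination must be 16-byte
   aligned, while the MMX movntq form carries no such requirement.  Roughly
   (invented helper name): */
#if 0
static bool iemExampleIsMovntdqDstOk(RTGCPTR GCPtr)
{
    return (GCPtr & 15) == 0; /* a misaligned movntdq destination raises #GP(0) */
}
#endif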
7796
7797
7798/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
7799FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
7800/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
7801FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
7802/* Opcode 0xf3 0x0f 0xe8 - invalid */
7803/* Opcode 0xf2 0x0f 0xe8 - invalid */
7804
7805/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
7806FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
7807/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
7808FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
7809/* Opcode 0xf3 0x0f 0xe9 - invalid */
7810/* Opcode 0xf2 0x0f 0xe9 - invalid */
7811
7812/** Opcode 0x0f 0xea - pminsw Pq, Qq */
7813FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
7814/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
7815FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
7816/* Opcode 0xf3 0x0f 0xea - invalid */
7817/* Opcode 0xf2 0x0f 0xea - invalid */
7818
7819/** Opcode 0x0f 0xeb - por Pq, Qq */
7820FNIEMOP_STUB(iemOp_por_Pq_Qq);
7821/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
7822FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
7823/* Opcode 0xf3 0x0f 0xeb - invalid */
7824/* Opcode 0xf2 0x0f 0xeb - invalid */
7825
7826/** Opcode 0x0f 0xec - paddsb Pq, Qq */
7827FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
7828/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
7829FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
7830/* Opcode 0xf3 0x0f 0xec - invalid */
7831/* Opcode 0xf2 0x0f 0xec - invalid */
7832
7833/** Opcode 0x0f 0xed - paddsw Pq, Qq */
7834FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
7835/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
7836FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
7837/* Opcode 0xf3 0x0f 0xed - invalid */
7838/* Opcode 0xf2 0x0f 0xed - invalid */
7839
7840/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
7841FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
7842/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
7843FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
7844/* Opcode 0xf3 0x0f 0xee - invalid */
7845/* Opcode 0xf2 0x0f 0xee - invalid */
7846
7847
7848/** Opcode 0x0f 0xef. */
7849FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7850{
7851 IEMOP_MNEMONIC(pxor, "pxor");
7852 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7853}
7854/* Opcode 0xf3 0x0f 0xef - invalid */
7855/* Opcode 0xf2 0x0f 0xef - invalid */
7856
7857/* Opcode 0x0f 0xf0 - invalid */
7858/* Opcode 0x66 0x0f 0xf0 - invalid */
7859/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
7860FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
7861
7862/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
7863FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
7864/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
7865FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
7866/* Opcode 0xf2 0x0f 0xf1 - invalid */
7867
7868/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
7869FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
7870/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
7871FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
7872/* Opcode 0xf2 0x0f 0xf2 - invalid */
7873
7874/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
7875FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
7876/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
7877FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
7878/* Opcode 0xf2 0x0f 0xf3 - invalid */
7879
7880/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
7881FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
7882/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
7883FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
7884/* Opcode 0xf2 0x0f 0xf4 - invalid */
7885
7886/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
7887FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
7888/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
7889FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
7890/* Opcode 0xf2 0x0f 0xf5 - invalid */
7891
7892/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
7893FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
7894/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
7895FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
7896/* Opcode 0xf2 0x0f 0xf6 - invalid */
7897
7898/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
7899FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
7900/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
7901FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
7902/* Opcode 0xf2 0x0f 0xf7 - invalid */
7903
7904/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
7905FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
7906/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
7907FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
7908/* Opcode 0xf2 0x0f 0xf8 - invalid */
7909
7910/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
7911FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
7912/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
7913FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
7914/* Opcode 0xf2 0x0f 0xf9 - invalid */
7915
7916/** Opcode 0x0f 0xfa - psubd Pq, Qq */
7917FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
7918/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
7919FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
7920/* Opcode 0xf2 0x0f 0xfa - invalid */
7921
7922/** Opcode 0x0f 0xfb - psubq Pq, Qq */
7923FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
7924/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
7925FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
7926/* Opcode 0xf2 0x0f 0xfb - invalid */
7927
7928/** Opcode 0x0f 0xfc - paddb Pq, Qq */
7929FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
7930/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
7931FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
7932/* Opcode 0xf2 0x0f 0xfc - invalid */
7933
7934/** Opcode 0x0f 0xfd - paddw Pq, Qq */
7935FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
7936/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
7937FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
7938/* Opcode 0xf2 0x0f 0xfd - invalid */
7939
7940/** Opcode 0x0f 0xfe - paddd Pq, Qq */
7941FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
7942/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
7943FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
7944/* Opcode 0xf2 0x0f 0xfe - invalid */
7945
7946
7947/** Opcode **** 0x0f 0xff - UD0 */
7948FNIEMOP_DEF(iemOp_ud0)
7949{
7950 IEMOP_MNEMONIC(ud0, "ud0");
7951 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7952 {
7953 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7954#ifndef TST_IEM_CHECK_MC
7955 RTGCPTR GCPtrEff;
7956 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
7957 if (rcStrict != VINF_SUCCESS)
7958 return rcStrict;
7959#endif
7960 IEMOP_HLP_DONE_DECODING();
7961 }
7962 return IEMOP_RAISE_INVALID_OPCODE();
7963}
7964
7965
7966
7967/** Repeats a_fn four times. For decoding tables. */
7968#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
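/* For example, a table row written as IEMOP_X4(iemOp_ud2) expands to the four
   prefix-variant columns: iemOp_ud2, iemOp_ud2, iemOp_ud2, iemOp_ud2. */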
7969
7970IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
7971{
7972    /*          no prefix,                  066h prefix,                f3h prefix,                 f2h prefix */
7973 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
7974 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
7975 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
7976 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
7977 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
7978 /* 0x05 */ IEMOP_X4(iemOp_syscall),
7979 /* 0x06 */ IEMOP_X4(iemOp_clts),
7980 /* 0x07 */ IEMOP_X4(iemOp_sysret),
7981 /* 0x08 */ IEMOP_X4(iemOp_invd),
7982 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
7983 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
7984 /* 0x0b */ IEMOP_X4(iemOp_ud2),
7985 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
7986 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
7987 /* 0x0e */ IEMOP_X4(iemOp_femms),
7988 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
7989
7990 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
7991 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
7992 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7993 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7994 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7995 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7996 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7997 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7998 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
7999 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
8000 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
8001 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
8002 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
8003 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
8004 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
8005 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
8006
8007 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
8008 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
8009 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
8010 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
8011 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
8012 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8013 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
8014 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
8015 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8016 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
8017 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
8018 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8019 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
8020 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
8021 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8022 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8023
8024 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
8025 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
8026 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
8027 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
8028 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
8029 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
8030 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
8031 /* 0x37 */ IEMOP_X4(iemOp_getsec),
8032 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
8033 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8034 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
8035 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8036 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8037 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
8038 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8039 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
8040
8041 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
8042 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
8043 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
8044 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
8045 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
8046 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
8047 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
8048 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
8049 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
8050 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
8051 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
8052 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
8053 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
8054 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
8055 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
8056 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
8057
8058 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8059 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
8060 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8061 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
8062 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8063 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8064 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8065 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8066 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
8067 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
8068 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
8069 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
8070 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
8071 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
8072 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
8073 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
8074
8075 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8076 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8077 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8078 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8079 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8080 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8081 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8082 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8083 /* 0x68 */ IEMOP_X4(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq),
8084 /* 0x69 */ IEMOP_X4(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq),
8085 /* 0x6a */ IEMOP_X4(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq),
8086 /* 0x6b */ IEMOP_X4(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq),
8087 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8088 /* 0x6d */ IEMOP_X4(iemOp_punpckhqdq_Vdq_Wdq),
8089 /* 0x6e */ IEMOP_X4(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey),
8090 /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),
8091
8092 /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
8093 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
8094 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
8095 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
8096 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8097 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8098 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8099 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8100
8101 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8102 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8103 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8104 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8105 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
8106 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
8107 /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
8108 /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),
8109
8110 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
8111 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
8112 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
8113 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
8114 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
8115 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
8116 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
8117 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
8118 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
8119 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
8120 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
8121 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
8122 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
8123 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
8124 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
8125 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
8126
8127 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
8128 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
8129 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
8130 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
8131 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
8132 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
8133 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
8134 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
8135 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
8136 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
8137 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
8138 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
8139 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
8140 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
8141 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
8142 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
8143
8144 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
8145 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
8146 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
8147 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
8148 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
8149 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
8150 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
8151 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
8152 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
8153 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
8154 /* 0xaa */ IEMOP_X4(iemOp_rsm),
8155 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
8156 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
8157 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
8158 /* 0xae */ IEMOP_X4(iemOp_Grp15),
8159 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
8160
8161 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
8162 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
8163 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
8164 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
8165 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
8166 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
8167 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
8168 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
8169 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
8170 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
8171 /* 0xba */ IEMOP_X4(iemOp_Grp8),
8172 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
8173 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
8174 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
8175 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
8176 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
8177
8178 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
8179 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
8180 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
8181 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8182 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8183 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
8184 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
8185 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
8186 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
8187 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
8188 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
8189 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
8190 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
8191 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
8192 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
8193 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
8194
8195 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
8196 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8197 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8198 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8199 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8200 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8201 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
8202 /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
8203 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8204 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8205 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8206 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8207 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8208 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8209 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8210 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8211
8212 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8213 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8214 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8215 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8216 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8217 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8218 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
8219 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8220 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8221 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8222 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8223 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8224 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8225 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8226 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8227 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8228
8229 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
8230 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8231 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8232 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8233 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8234 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8235 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8236 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8237 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8238 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8239 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8240 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8241 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8242 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8243 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
8244 /* 0xff */ IEMOP_X4(iemOp_ud0),
8245};
8246AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8247/** @} */
8248
8249
8250/** @name One byte opcodes.
8251 *
8252 * @{
8253 */
8254
8255/** Opcode 0x00. */
8256FNIEMOP_DEF(iemOp_add_Eb_Gb)
8257{
8258 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
8259 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
8260}
8261
8262
8263/** Opcode 0x01. */
8264FNIEMOP_DEF(iemOp_add_Ev_Gv)
8265{
8266 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
8267 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
8268}
8269
8270
8271/** Opcode 0x02. */
8272FNIEMOP_DEF(iemOp_add_Gb_Eb)
8273{
8274 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
8275 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
8276}
8277
8278
8279/** Opcode 0x03. */
8280FNIEMOP_DEF(iemOp_add_Gv_Ev)
8281{
8282 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
8283 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
8284}
8285
8286
8287/** Opcode 0x04. */
8288FNIEMOP_DEF(iemOp_add_Al_Ib)
8289{
8290 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
8291 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
8292}
8293
8294
8295/** Opcode 0x05. */
8296FNIEMOP_DEF(iemOp_add_eAX_Iz)
8297{
8298 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
8299 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
8300}
8301
8302
8303/** Opcode 0x06. */
8304FNIEMOP_DEF(iemOp_push_ES)
8305{
8306 IEMOP_MNEMONIC(push_es, "push es");
8307 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
8308}
8309
8310
8311/** Opcode 0x07. */
8312FNIEMOP_DEF(iemOp_pop_ES)
8313{
8314 IEMOP_MNEMONIC(pop_es, "pop es");
8315 IEMOP_HLP_NO_64BIT();
8316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8317 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
8318}
8319
8320
8321/** Opcode 0x08. */
8322FNIEMOP_DEF(iemOp_or_Eb_Gb)
8323{
8324 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
8325 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8326 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
8327}
8328
8329
8330/** Opcode 0x09. */
8331FNIEMOP_DEF(iemOp_or_Ev_Gv)
8332{
8333 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
8334 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8335 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
8336}
8337
8338
8339/** Opcode 0x0a. */
8340FNIEMOP_DEF(iemOp_or_Gb_Eb)
8341{
8342 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
8343 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8344 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
8345}
8346
8347
8348/** Opcode 0x0b. */
8349FNIEMOP_DEF(iemOp_or_Gv_Ev)
8350{
8351 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
8352 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8353 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
8354}
8355
8356
8357/** Opcode 0x0c. */
8358FNIEMOP_DEF(iemOp_or_Al_Ib)
8359{
8360 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
8361 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8362 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
8363}
8364
8365
8366/** Opcode 0x0d. */
8367FNIEMOP_DEF(iemOp_or_eAX_Iz)
8368{
8369 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
8370 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8371 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
8372}
8373
8374
8375/** Opcode 0x0e. */
8376FNIEMOP_DEF(iemOp_push_CS)
8377{
8378 IEMOP_MNEMONIC(push_cs, "push cs");
8379 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
8380}
8381
8382
8383/** Opcode 0x0f. */
8384FNIEMOP_DEF(iemOp_2byteEscape)
8385{
8386#ifdef VBOX_STRICT
8387 static bool s_fTested = false;
8388 if (RT_LIKELY(s_fTested)) { /* likely */ }
8389 else
8390 {
8391 s_fTested = true;
8392 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
8393 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
8394 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
8395 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
8396 }
8397#endif
8398
8399 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8400
8401 /** @todo PUSH CS on 8086, undefined on 80186. */
8402 IEMOP_HLP_MIN_286();
8403 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
8404}
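
/*
 * Illustrative sketch (not part of the build): how the dispatch above picks
 * an entry.  Each two-byte opcode owns four consecutive g_apfnTwoByteMap
 * slots, selected by the last mandatory prefix seen: 0 = none, 1 = 0x66,
 * 2 = 0xF3, 3 = 0xF2 (this is what the bsf/tzcnt assertions check).  The
 * helper and its parameters below are made up for illustration.
 */
#if 0
typedef int (*PFNDEMOOP)(void);
static int demoDispatchTwoByte(PFNDEMOOP const *papfnMap, uint8_t bOpcode, uint8_t idxPrefix)
{
    /* idxPrefix is kept in the 0..3 range by the prefix decoders. */
    return papfnMap[(uintptr_t)bOpcode * 4 + idxPrefix]();
}
#endif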
8405
8406/** Opcode 0x10. */
8407FNIEMOP_DEF(iemOp_adc_Eb_Gb)
8408{
8409 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
8410 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
8411}
8412
8413
8414/** Opcode 0x11. */
8415FNIEMOP_DEF(iemOp_adc_Ev_Gv)
8416{
8417 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
8418 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
8419}
8420
8421
8422/** Opcode 0x12. */
8423FNIEMOP_DEF(iemOp_adc_Gb_Eb)
8424{
8425 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
8426 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
8427}
8428
8429
8430/** Opcode 0x13. */
8431FNIEMOP_DEF(iemOp_adc_Gv_Ev)
8432{
8433 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
8434 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
8435}
8436
8437
8438/** Opcode 0x14. */
8439FNIEMOP_DEF(iemOp_adc_Al_Ib)
8440{
8441 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
8442 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
8443}
8444
8445
8446/** Opcode 0x15. */
8447FNIEMOP_DEF(iemOp_adc_eAX_Iz)
8448{
8449 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
8450 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
8451}
8452
8453
8454/** Opcode 0x16. */
8455FNIEMOP_DEF(iemOp_push_SS)
8456{
8457 IEMOP_MNEMONIC(push_ss, "push ss");
8458 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
8459}
8460
8461
8462/** Opcode 0x17. */
8463FNIEMOP_DEF(iemOp_pop_SS)
8464{
8465 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
8466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8467 IEMOP_HLP_NO_64BIT();
8468 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
8469}
8470
8471
8472/** Opcode 0x18. */
8473FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
8474{
8475 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
8476 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
8477}
8478
8479
8480/** Opcode 0x19. */
8481FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
8482{
8483 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
8484 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
8485}
8486
8487
8488/** Opcode 0x1a. */
8489FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
8490{
8491 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
8492 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
8493}
8494
8495
8496/** Opcode 0x1b. */
8497FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
8498{
8499 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
8500 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
8501}
8502
8503
8504/** Opcode 0x1c. */
8505FNIEMOP_DEF(iemOp_sbb_Al_Ib)
8506{
8507 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
8508 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
8509}
8510
8511
8512/** Opcode 0x1d. */
8513FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
8514{
8515 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
8516 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
8517}
8518
8519
8520/** Opcode 0x1e. */
8521FNIEMOP_DEF(iemOp_push_DS)
8522{
8523 IEMOP_MNEMONIC(push_ds, "push ds");
8524 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
8525}
8526
8527
8528/** Opcode 0x1f. */
8529FNIEMOP_DEF(iemOp_pop_DS)
8530{
8531 IEMOP_MNEMONIC(pop_ds, "pop ds");
8532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8533 IEMOP_HLP_NO_64BIT();
8534 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
8535}
8536
8537
8538/** Opcode 0x20. */
8539FNIEMOP_DEF(iemOp_and_Eb_Gb)
8540{
8541 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
8542 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8543 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
8544}
8545
8546
8547/** Opcode 0x21. */
8548FNIEMOP_DEF(iemOp_and_Ev_Gv)
8549{
8550 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
8551 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8552 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
8553}
8554
8555
8556/** Opcode 0x22. */
8557FNIEMOP_DEF(iemOp_and_Gb_Eb)
8558{
8559 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
8560 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8561 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
8562}
8563
8564
8565/** Opcode 0x23. */
8566FNIEMOP_DEF(iemOp_and_Gv_Ev)
8567{
8568 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
8569 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8570 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
8571}
8572
8573
8574/** Opcode 0x24. */
8575FNIEMOP_DEF(iemOp_and_Al_Ib)
8576{
8577 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
8578 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8579 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
8580}
8581
8582
8583/** Opcode 0x25. */
8584FNIEMOP_DEF(iemOp_and_eAX_Iz)
8585{
8586 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
8587 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8588 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
8589}
8590
8591
8592/** Opcode 0x26. */
8593FNIEMOP_DEF(iemOp_seg_ES)
8594{
8595 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
8596 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
8597 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
8598
8599 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8600 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8601}
8602
8603
8604/** Opcode 0x27. */
8605FNIEMOP_DEF(iemOp_daa)
8606{
8607 IEMOP_MNEMONIC(daa_AL, "daa AL");
8608 IEMOP_HLP_NO_64BIT();
8609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8610 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8611 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
8612}
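
/*
 * Illustrative sketch (not part of the build): a rough model of the BCD
 * adjustment iemCImpl_daa defers to, following the SDM pseudo code (DAS is
 * the analogous subtraction form).  Only AL, AF and CF are modelled; the
 * other status flags are omitted and the helper name is made up.
 */
#if 0
static void demoDaa(uint8_t *pbAl, unsigned *pfAf, unsigned *pfCf)
{
    uint8_t const  bAlOld = *pbAl;
    unsigned const fCfOld = *pfCf;
    if ((bAlOld & 0xf) > 9 || *pfAf)
    {
        *pbAl = (uint8_t)(*pbAl + 6);       /* fix up the low nibble */
        *pfAf = 1;
    }
    else
        *pfAf = 0;
    if (bAlOld > 0x99 || fCfOld)
    {
        *pbAl = (uint8_t)(*pbAl + 0x60);    /* fix up the high nibble */
        *pfCf = 1;
    }
    else
        *pfCf = 0;
}
#endif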
8613
8614
8615/** Opcode 0x28. */
8616FNIEMOP_DEF(iemOp_sub_Eb_Gb)
8617{
8618 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
8619 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
8620}
8621
8622
8623/** Opcode 0x29. */
8624FNIEMOP_DEF(iemOp_sub_Ev_Gv)
8625{
8626 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
8627 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
8628}
8629
8630
8631/** Opcode 0x2a. */
8632FNIEMOP_DEF(iemOp_sub_Gb_Eb)
8633{
8634 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
8635 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
8636}
8637
8638
8639/** Opcode 0x2b. */
8640FNIEMOP_DEF(iemOp_sub_Gv_Ev)
8641{
8642 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
8643 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
8644}
8645
8646
8647/** Opcode 0x2c. */
8648FNIEMOP_DEF(iemOp_sub_Al_Ib)
8649{
8650 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
8651 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
8652}
8653
8654
8655/** Opcode 0x2d. */
8656FNIEMOP_DEF(iemOp_sub_eAX_Iz)
8657{
8658 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
8659 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
8660}
8661
8662
8663/** Opcode 0x2e. */
8664FNIEMOP_DEF(iemOp_seg_CS)
8665{
8666 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
8667 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
8668 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
8669
8670 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8671 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8672}
8673
8674
8675/** Opcode 0x2f. */
8676FNIEMOP_DEF(iemOp_das)
8677{
8678 IEMOP_MNEMONIC(das_AL, "das AL");
8679 IEMOP_HLP_NO_64BIT();
8680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8681 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8682 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
8683}
8684
8685
8686/** Opcode 0x30. */
8687FNIEMOP_DEF(iemOp_xor_Eb_Gb)
8688{
8689 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
8690 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8691 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
8692}
8693
8694
8695/** Opcode 0x31. */
8696FNIEMOP_DEF(iemOp_xor_Ev_Gv)
8697{
8698 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
8699 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8700 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
8701}
8702
8703
8704/** Opcode 0x32. */
8705FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8706{
8707 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8708 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8709 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8710}
8711
8712
8713/** Opcode 0x33. */
8714FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8715{
8716 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8717 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8718 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8719}
8720
8721
8722/** Opcode 0x34. */
8723FNIEMOP_DEF(iemOp_xor_Al_Ib)
8724{
8725 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8726 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8727 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8728}
8729
8730
8731/** Opcode 0x35. */
8732FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8733{
8734 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8735 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8736 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8737}
8738
8739
8740/** Opcode 0x36. */
8741FNIEMOP_DEF(iemOp_seg_SS)
8742{
8743 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8744 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8745 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8746
8747 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8748 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8749}
8750
8751
8752/** Opcode 0x37. */
8753FNIEMOP_STUB(iemOp_aaa);
8754
8755
8756/** Opcode 0x38. */
8757FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8758{
8759 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8760 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8761}
8762
8763
8764/** Opcode 0x39. */
8765FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8766{
8767 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8768 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8769}
8770
8771
8772/** Opcode 0x3a. */
8773FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8774{
8775 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8776 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8777}
8778
8779
8780/** Opcode 0x3b. */
8781FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8782{
8783 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8784 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8785}
8786
8787
8788/** Opcode 0x3c. */
8789FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8790{
8791 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8792 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8793}
8794
8795
8796/** Opcode 0x3d. */
8797FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8798{
8799 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8800 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8801}
8802
8803
8804/** Opcode 0x3e. */
8805FNIEMOP_DEF(iemOp_seg_DS)
8806{
8807 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8808 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8809 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8810
8811 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8812 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8813}
8814
8815
8816/** Opcode 0x3f. */
8817FNIEMOP_STUB(iemOp_aas);
8818
8819/**
8820 * Common 'inc/dec/not/neg register' helper.
8821 */
8822FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8823{
8824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8825 switch (pVCpu->iem.s.enmEffOpSize)
8826 {
8827 case IEMMODE_16BIT:
8828 IEM_MC_BEGIN(2, 0);
8829 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8830 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8831 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8832 IEM_MC_REF_EFLAGS(pEFlags);
8833 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8834 IEM_MC_ADVANCE_RIP();
8835 IEM_MC_END();
8836 return VINF_SUCCESS;
8837
8838 case IEMMODE_32BIT:
8839 IEM_MC_BEGIN(2, 0);
8840 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8841 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8842 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8843 IEM_MC_REF_EFLAGS(pEFlags);
8844 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8845 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8846 IEM_MC_ADVANCE_RIP();
8847 IEM_MC_END();
8848 return VINF_SUCCESS;
8849
8850 case IEMMODE_64BIT:
8851 IEM_MC_BEGIN(2, 0);
8852 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8853 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8854 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8855 IEM_MC_REF_EFLAGS(pEFlags);
8856 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8857 IEM_MC_ADVANCE_RIP();
8858 IEM_MC_END();
8859 return VINF_SUCCESS;
8860 }
8861 return VINF_SUCCESS;
8862}
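
/*
 * Illustrative sketch (not part of the build): the register write widths
 * the three cases above model.  A 32-bit destination write zero-extends
 * into the whole 64-bit register (hence IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF
 * in the 32-bit case only), while a 16-bit write leaves bits 63:16 alone.
 * Helper names are made up.
 */
#if 0
static void demoWriteGReg32(uint64_t *pu64Reg, uint32_t u32Value)
{
    *pu64Reg = u32Value;                    /* bits 63:32 are cleared */
}

static void demoWriteGReg16(uint64_t *pu64Reg, uint16_t u16Value)
{
    *pu64Reg = (*pu64Reg & UINT64_C(0xffffffffffff0000)) | u16Value;
}
#endif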
8863
8864
8865/** Opcode 0x40. */
8866FNIEMOP_DEF(iemOp_inc_eAX)
8867{
8868 /*
8869 * This is a REX prefix in 64-bit mode.
8870 */
8871 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8872 {
8873 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8874 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8875
8876 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8877 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8878 }
8879
8880 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8881 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8882}
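
/*
 * Illustrative sketch (not part of the build): what the 0x40..0x4f prefix
 * handlers collectively decode.  The low nibble of the prefix byte carries
 * REX.W/R/X/B; each set bit extends the matching 3-bit register field by 8,
 * which is why the handlers assign '1 << 3' to uRexReg/uRexIndex/uRexB.
 * The helper below is made up.
 */
#if 0
static void demoDecodeRex(uint8_t bRex, uint8_t *puRexReg, uint8_t *puRexIndex,
                          uint8_t *puRexB, unsigned *pfRexW)
{
    *pfRexW     = (bRex >> 3) & 1;                      /* REX.W: 64-bit operand size */
    *puRexReg   = (uint8_t)(((bRex >> 2) & 1) << 3);    /* REX.R: extends ModRM.reg   */
    *puRexIndex = (uint8_t)(((bRex >> 1) & 1) << 3);    /* REX.X: extends SIB.index   */
    *puRexB     = (uint8_t)(( bRex       & 1) << 3);    /* REX.B: extends ModRM.rm    */
}
#endif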
8883
8884
8885/** Opcode 0x41. */
8886FNIEMOP_DEF(iemOp_inc_eCX)
8887{
8888 /*
8889 * This is a REX prefix in 64-bit mode.
8890 */
8891 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8892 {
8893 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8894 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8895 pVCpu->iem.s.uRexB = 1 << 3;
8896
8897 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8898 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8899 }
8900
8901 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8902 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8903}
8904
8905
8906/** Opcode 0x42. */
8907FNIEMOP_DEF(iemOp_inc_eDX)
8908{
8909 /*
8910 * This is a REX prefix in 64-bit mode.
8911 */
8912 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8913 {
8914 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8915 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8916 pVCpu->iem.s.uRexIndex = 1 << 3;
8917
8918 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8919 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8920 }
8921
8922 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8923 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8924}
8925
8926
8927
8928/** Opcode 0x43. */
8929FNIEMOP_DEF(iemOp_inc_eBX)
8930{
8931 /*
8932 * This is a REX prefix in 64-bit mode.
8933 */
8934 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8935 {
8936 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8937 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8938 pVCpu->iem.s.uRexB = 1 << 3;
8939 pVCpu->iem.s.uRexIndex = 1 << 3;
8940
8941 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8942 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8943 }
8944
8945 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8946 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8947}
8948
8949
8950/** Opcode 0x44. */
8951FNIEMOP_DEF(iemOp_inc_eSP)
8952{
8953 /*
8954 * This is a REX prefix in 64-bit mode.
8955 */
8956 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8957 {
8958 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8959 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8960 pVCpu->iem.s.uRexReg = 1 << 3;
8961
8962 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8963 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8964 }
8965
8966 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
8967 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8968}
8969
8970
8971/** Opcode 0x45. */
8972FNIEMOP_DEF(iemOp_inc_eBP)
8973{
8974 /*
8975 * This is a REX prefix in 64-bit mode.
8976 */
8977 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8978 {
8979 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8980 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8981 pVCpu->iem.s.uRexReg = 1 << 3;
8982 pVCpu->iem.s.uRexB = 1 << 3;
8983
8984 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8985 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8986 }
8987
8988 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
8989 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8990}
8991
8992
8993/** Opcode 0x46. */
8994FNIEMOP_DEF(iemOp_inc_eSI)
8995{
8996 /*
8997 * This is a REX prefix in 64-bit mode.
8998 */
8999 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9000 {
9001 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
9002 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
9003 pVCpu->iem.s.uRexReg = 1 << 3;
9004 pVCpu->iem.s.uRexIndex = 1 << 3;
9005
9006 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9007 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9008 }
9009
9010 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
9011 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
9012}
9013
9014
9015/** Opcode 0x47. */
9016FNIEMOP_DEF(iemOp_inc_eDI)
9017{
9018 /*
9019 * This is a REX prefix in 64-bit mode.
9020 */
9021 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9022 {
9023 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
9024 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
9025 pVCpu->iem.s.uRexReg = 1 << 3;
9026 pVCpu->iem.s.uRexB = 1 << 3;
9027 pVCpu->iem.s.uRexIndex = 1 << 3;
9028
9029 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9030 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9031 }
9032
9033 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
9034 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
9035}
9036
9037
9038/** Opcode 0x48. */
9039FNIEMOP_DEF(iemOp_dec_eAX)
9040{
9041 /*
9042 * This is a REX prefix in 64-bit mode.
9043 */
9044 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9045 {
9046 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
9047 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
9048 iemRecalEffOpSize(pVCpu);
9049
9050 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9051 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9052 }
9053
9054 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
9055 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
9056}
9057
9058
9059/** Opcode 0x49. */
9060FNIEMOP_DEF(iemOp_dec_eCX)
9061{
9062 /*
9063 * This is a REX prefix in 64-bit mode.
9064 */
9065 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9066 {
9067 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
9068 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9069 pVCpu->iem.s.uRexB = 1 << 3;
9070 iemRecalEffOpSize(pVCpu);
9071
9072 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9073 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9074 }
9075
9076 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
9077 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
9078}
9079
9080
9081/** Opcode 0x4a. */
9082FNIEMOP_DEF(iemOp_dec_eDX)
9083{
9084 /*
9085 * This is a REX prefix in 64-bit mode.
9086 */
9087 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9088 {
9089 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
9090 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9091 pVCpu->iem.s.uRexIndex = 1 << 3;
9092 iemRecalEffOpSize(pVCpu);
9093
9094 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9095 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9096 }
9097
9098 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
9099 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
9100}
9101
9102
9103/** Opcode 0x4b. */
9104FNIEMOP_DEF(iemOp_dec_eBX)
9105{
9106 /*
9107 * This is a REX prefix in 64-bit mode.
9108 */
9109 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9110 {
9111 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
9112 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9113 pVCpu->iem.s.uRexB = 1 << 3;
9114 pVCpu->iem.s.uRexIndex = 1 << 3;
9115 iemRecalEffOpSize(pVCpu);
9116
9117 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9118 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9119 }
9120
9121 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
9122 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
9123}
9124
9125
9126/** Opcode 0x4c. */
9127FNIEMOP_DEF(iemOp_dec_eSP)
9128{
9129 /*
9130 * This is a REX prefix in 64-bit mode.
9131 */
9132 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9133 {
9134 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
9135 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
9136 pVCpu->iem.s.uRexReg = 1 << 3;
9137 iemRecalEffOpSize(pVCpu);
9138
9139 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9140 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9141 }
9142
9143 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
9144 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
9145}
9146
9147
9148/** Opcode 0x4d. */
9149FNIEMOP_DEF(iemOp_dec_eBP)
9150{
9151 /*
9152 * This is a REX prefix in 64-bit mode.
9153 */
9154 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9155 {
9156 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
9157 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
9158 pVCpu->iem.s.uRexReg = 1 << 3;
9159 pVCpu->iem.s.uRexB = 1 << 3;
9160 iemRecalEffOpSize(pVCpu);
9161
9162 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9163 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9164 }
9165
9166 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
9167 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
9168}
9169
9170
9171/** Opcode 0x4e. */
9172FNIEMOP_DEF(iemOp_dec_eSI)
9173{
9174 /*
9175 * This is a REX prefix in 64-bit mode.
9176 */
9177 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9178 {
9179 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
9180 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9181 pVCpu->iem.s.uRexReg = 1 << 3;
9182 pVCpu->iem.s.uRexIndex = 1 << 3;
9183 iemRecalEffOpSize(pVCpu);
9184
9185 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9186 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9187 }
9188
9189 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
9190 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
9191}
9192
9193
9194/** Opcode 0x4f. */
9195FNIEMOP_DEF(iemOp_dec_eDI)
9196{
9197 /*
9198 * This is a REX prefix in 64-bit mode.
9199 */
9200 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9201 {
9202 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
9203 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
9204 pVCpu->iem.s.uRexReg = 1 << 3;
9205 pVCpu->iem.s.uRexB = 1 << 3;
9206 pVCpu->iem.s.uRexIndex = 1 << 3;
9207 iemRecalEffOpSize(pVCpu);
9208
9209 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9210 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9211 }
9212
9213 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
9214 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
9215}
9216
9217
9218/**
9219 * Common 'push register' helper.
9220 */
9221FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9222{
9223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9224 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9225 {
9226 iReg |= pVCpu->iem.s.uRexB;
9227 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9228 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9229 }
9230
9231 switch (pVCpu->iem.s.enmEffOpSize)
9232 {
9233 case IEMMODE_16BIT:
9234 IEM_MC_BEGIN(0, 1);
9235 IEM_MC_LOCAL(uint16_t, u16Value);
9236 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9237 IEM_MC_PUSH_U16(u16Value);
9238 IEM_MC_ADVANCE_RIP();
9239 IEM_MC_END();
9240 break;
9241
9242 case IEMMODE_32BIT:
9243 IEM_MC_BEGIN(0, 1);
9244 IEM_MC_LOCAL(uint32_t, u32Value);
9245 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9246 IEM_MC_PUSH_U32(u32Value);
9247 IEM_MC_ADVANCE_RIP();
9248 IEM_MC_END();
9249 break;
9250
9251 case IEMMODE_64BIT:
9252 IEM_MC_BEGIN(0, 1);
9253 IEM_MC_LOCAL(uint64_t, u64Value);
9254 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9255 IEM_MC_PUSH_U64(u64Value);
9256 IEM_MC_ADVANCE_RIP();
9257 IEM_MC_END();
9258 break;
9259 }
9260
9261 return VINF_SUCCESS;
9262}
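
/*
 * Illustrative sketch (not part of the build): the 64-bit mode stack
 * operand size rule applied above.  The default is 64-bit, the 0x66 prefix
 * selects 16-bit, and a 32-bit push/pop is not encodable in long mode.
 * The helper name is made up.
 */
#if 0
static IEMMODE demoStackOpSize64(int fHasOpSizePrefix)
{
    return fHasOpSizePrefix ? IEMMODE_16BIT : IEMMODE_64BIT;
}
#endif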
9263
9264
9265/** Opcode 0x50. */
9266FNIEMOP_DEF(iemOp_push_eAX)
9267{
9268 IEMOP_MNEMONIC(push_rAX, "push rAX");
9269 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
9270}
9271
9272
9273/** Opcode 0x51. */
9274FNIEMOP_DEF(iemOp_push_eCX)
9275{
9276 IEMOP_MNEMONIC(push_rCX, "push rCX");
9277 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
9278}
9279
9280
9281/** Opcode 0x52. */
9282FNIEMOP_DEF(iemOp_push_eDX)
9283{
9284 IEMOP_MNEMONIC(push_rDX, "push rDX");
9285 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
9286}
9287
9288
9289/** Opcode 0x53. */
9290FNIEMOP_DEF(iemOp_push_eBX)
9291{
9292 IEMOP_MNEMONIC(push_rBX, "push rBX");
9293 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
9294}
9295
9296
9297/** Opcode 0x54. */
9298FNIEMOP_DEF(iemOp_push_eSP)
9299{
9300 IEMOP_MNEMONIC(push_rSP, "push rSP");
9301 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9302 {
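        /* Quirk: the 8086/8088 pushes the decremented SP value (SP - 2),
           whereas later CPUs push the original SP via the common helper. */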
9303 IEM_MC_BEGIN(0, 1);
9304 IEM_MC_LOCAL(uint16_t, u16Value);
9305 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9306 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9307 IEM_MC_PUSH_U16(u16Value);
9308 IEM_MC_ADVANCE_RIP();
9309 IEM_MC_END();
9310 }
9311 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9312}
9313
9314
9315/** Opcode 0x55. */
9316FNIEMOP_DEF(iemOp_push_eBP)
9317{
9318 IEMOP_MNEMONIC(push_rBP, "push rBP");
9319 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
9320}
9321
9322
9323/** Opcode 0x56. */
9324FNIEMOP_DEF(iemOp_push_eSI)
9325{
9326 IEMOP_MNEMONIC(push_rSI, "push rSI");
9327 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
9328}
9329
9330
9331/** Opcode 0x57. */
9332FNIEMOP_DEF(iemOp_push_eDI)
9333{
9334 IEMOP_MNEMONIC(push_rDI, "push rDI");
9335 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
9336}
9337
9338
9339/**
9340 * Common 'pop register' helper.
9341 */
9342FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9343{
9344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9345 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9346 {
9347 iReg |= pVCpu->iem.s.uRexB;
9348 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9349 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9350 }
9351
9352 switch (pVCpu->iem.s.enmEffOpSize)
9353 {
9354 case IEMMODE_16BIT:
9355 IEM_MC_BEGIN(0, 1);
9356 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9357 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9358 IEM_MC_POP_U16(pu16Dst);
9359 IEM_MC_ADVANCE_RIP();
9360 IEM_MC_END();
9361 break;
9362
9363 case IEMMODE_32BIT:
9364 IEM_MC_BEGIN(0, 1);
9365 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9366 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9367 IEM_MC_POP_U32(pu32Dst);
9368 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase */
9369 IEM_MC_ADVANCE_RIP();
9370 IEM_MC_END();
9371 break;
9372
9373 case IEMMODE_64BIT:
9374 IEM_MC_BEGIN(0, 1);
9375 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9376 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9377 IEM_MC_POP_U64(pu64Dst);
9378 IEM_MC_ADVANCE_RIP();
9379 IEM_MC_END();
9380 break;
9381 }
9382
9383 return VINF_SUCCESS;
9384}
9385
9386
9387/** Opcode 0x58. */
9388FNIEMOP_DEF(iemOp_pop_eAX)
9389{
9390 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
9391 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
9392}
9393
9394
9395/** Opcode 0x59. */
9396FNIEMOP_DEF(iemOp_pop_eCX)
9397{
9398 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
9399 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
9400}
9401
9402
9403/** Opcode 0x5a. */
9404FNIEMOP_DEF(iemOp_pop_eDX)
9405{
9406 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
9407 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
9408}
9409
9410
9411/** Opcode 0x5b. */
9412FNIEMOP_DEF(iemOp_pop_eBX)
9413{
9414 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
9415 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
9416}
9417
9418
9419/** Opcode 0x5c. */
9420FNIEMOP_DEF(iemOp_pop_eSP)
9421{
9422 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
9423 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9424 {
9425 if (pVCpu->iem.s.uRexB)
9426 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
9427 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9428 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9429 }
9430
9431 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
9432 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
9433 /** @todo add testcase for this instruction. */
9434 switch (pVCpu->iem.s.enmEffOpSize)
9435 {
9436 case IEMMODE_16BIT:
9437 IEM_MC_BEGIN(0, 1);
9438 IEM_MC_LOCAL(uint16_t, u16Dst);
9439 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
9440 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
9441 IEM_MC_ADVANCE_RIP();
9442 IEM_MC_END();
9443 break;
9444
9445 case IEMMODE_32BIT:
9446 IEM_MC_BEGIN(0, 1);
9447 IEM_MC_LOCAL(uint32_t, u32Dst);
9448 IEM_MC_POP_U32(&u32Dst);
9449 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
9450 IEM_MC_ADVANCE_RIP();
9451 IEM_MC_END();
9452 break;
9453
9454 case IEMMODE_64BIT:
9455 IEM_MC_BEGIN(0, 1);
9456 IEM_MC_LOCAL(uint64_t, u64Dst);
9457 IEM_MC_POP_U64(&u64Dst);
9458 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
9459 IEM_MC_ADVANCE_RIP();
9460 IEM_MC_END();
9461 break;
9462 }
9463
9464 return VINF_SUCCESS;
9465}
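
/*
 * Illustrative sketch (not part of the build): the 'pop rSP' semantics the
 * code above implements.  The stack pointer is incremented first and then
 * overwritten by the value read from the old top of stack, so the increment
 * is never observable.  Helper and parameters are made up; fault checks are
 * omitted.
 */
#if 0
static void demoPopRsp(uint64_t *puRsp, uint64_t const *puOldTopOfStack)
{
    uint64_t const uValue = *puOldTopOfStack;   /* read at the old rSP */
    *puRsp += 8;                                /* the normal pop step... */
    *puRsp  = uValue;                           /* ...is then overwritten */
}
#endif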
9466
9467
9468/** Opcode 0x5d. */
9469FNIEMOP_DEF(iemOp_pop_eBP)
9470{
9471 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
9472 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
9473}
9474
9475
9476/** Opcode 0x5e. */
9477FNIEMOP_DEF(iemOp_pop_eSI)
9478{
9479 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
9480 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
9481}
9482
9483
9484/** Opcode 0x5f. */
9485FNIEMOP_DEF(iemOp_pop_eDI)
9486{
9487 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
9488 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
9489}
9490
9491
9492/** Opcode 0x60. */
9493FNIEMOP_DEF(iemOp_pusha)
9494{
9495 IEMOP_MNEMONIC(pusha, "pusha");
9496 IEMOP_HLP_MIN_186();
9497 IEMOP_HLP_NO_64BIT();
9498 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9499 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
9500 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9501 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
9502}
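
/*
 * Illustrative sketch (not part of the build): the push order implemented
 * by iemCImpl_pusha_16/32, with the SP value sampled before the first push
 * stored in the middle of the group.  Helper and callback are made up;
 * fault handling is omitted.
 */
#if 0
static void demoPusha16(uint16_t const auRegs[8] /* AX,CX,DX,BX,SP,BP,SI,DI */,
                        void (*pfnPush16)(uint16_t))
{
    uint16_t const uSpOriginal = auRegs[4];     /* sampled before any push */
    pfnPush16(auRegs[0]);                       /* AX */
    pfnPush16(auRegs[1]);                       /* CX */
    pfnPush16(auRegs[2]);                       /* DX */
    pfnPush16(auRegs[3]);                       /* BX */
    pfnPush16(uSpOriginal);                     /* original SP */
    pfnPush16(auRegs[5]);                       /* BP */
    pfnPush16(auRegs[6]);                       /* SI */
    pfnPush16(auRegs[7]);                       /* DI */
}
#endif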
9503
9504
9505/** Opcode 0x61. */
9506FNIEMOP_DEF(iemOp_popa)
9507{
9508 IEMOP_MNEMONIC(popa, "popa");
9509 IEMOP_HLP_MIN_186();
9510 IEMOP_HLP_NO_64BIT();
9511 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
9512 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
9513 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
9514 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
9515}
9516
9517
9518/** Opcode 0x62. */
9519FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
9520// IEMOP_HLP_MIN_186();
9521
9522
9523/** Opcode 0x63 - non-64-bit modes. */
9524FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
9525{
9526 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
9527 IEMOP_HLP_MIN_286();
9528 IEMOP_HLP_NO_REAL_OR_V86_MODE();
9529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9530
9531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9532 {
9533 /* Register */
9534 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9535 IEM_MC_BEGIN(3, 0);
9536 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9537 IEM_MC_ARG(uint16_t, u16Src, 1);
9538 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9539
9540 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9541 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
9542 IEM_MC_REF_EFLAGS(pEFlags);
9543 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9544
9545 IEM_MC_ADVANCE_RIP();
9546 IEM_MC_END();
9547 }
9548 else
9549 {
9550 /* Memory */
9551 IEM_MC_BEGIN(3, 2);
9552 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9553 IEM_MC_ARG(uint16_t, u16Src, 1);
9554 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9556
9557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9558 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
9559 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9560 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
9561 IEM_MC_FETCH_EFLAGS(EFlags);
9562 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
9563
9564 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9565 IEM_MC_COMMIT_EFLAGS(EFlags);
9566 IEM_MC_ADVANCE_RIP();
9567 IEM_MC_END();
9568 }
9569 return VINF_SUCCESS;
9570
9571}
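
/*
 * Illustrative sketch (not part of the build): a rough model of what the
 * iemAImpl_arpl worker computes.  The destination selector's RPL field (the
 * low two bits) is raised to the source's RPL when lower, and ZF reports
 * whether an adjustment was made.  The helper name and the plain ZF
 * out-parameter are made up.
 */
#if 0
static void demoArpl(uint16_t *pu16Dst, uint16_t u16Src, unsigned *pfZf)
{
    if ((*pu16Dst & X86_SEL_RPL) < (u16Src & X86_SEL_RPL))
    {
        *pu16Dst = (uint16_t)((*pu16Dst & ~(uint16_t)X86_SEL_RPL) | (u16Src & X86_SEL_RPL));
        *pfZf = 1;
    }
    else
        *pfZf = 0;
}
#endif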
9572
9573
9574/** Opcode 0x63.
9575 * @note This is a weird one. It works like a regular move instruction if
9576 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
9577 * @todo This definitely needs a testcase to verify the odd cases. */
9578FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9579{
9580 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
9581
9582 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
9583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9584
9585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9586 {
9587 /*
9588 * Register to register.
9589 */
9590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9591 IEM_MC_BEGIN(0, 1);
9592 IEM_MC_LOCAL(uint64_t, u64Value);
9593 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9594 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9595 IEM_MC_ADVANCE_RIP();
9596 IEM_MC_END();
9597 }
9598 else
9599 {
9600 /*
9601 * We're loading a register from memory.
9602 */
9603 IEM_MC_BEGIN(0, 2);
9604 IEM_MC_LOCAL(uint64_t, u64Value);
9605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9608 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9609 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9610 IEM_MC_ADVANCE_RIP();
9611 IEM_MC_END();
9612 }
9613 return VINF_SUCCESS;
9614}
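
/*
 * Illustrative sketch (not part of the build): the sign extension the
 * REX.W form above performs (without REX.W the instruction acts as a plain
 * move, as noted).  The helper name is made up.
 */
#if 0
static uint64_t demoMovsxd(uint32_t u32Src)
{
    return (uint64_t)(int64_t)(int32_t)u32Src;  /* e.g. 0x80000000 -> 0xffffffff80000000 */
}
#endif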
9615
9616
9617/** Opcode 0x64. */
9618FNIEMOP_DEF(iemOp_seg_FS)
9619{
9620 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9621 IEMOP_HLP_MIN_386();
9622
9623 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9624 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9625
9626 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9627 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9628}
9629
9630
9631/** Opcode 0x65. */
9632FNIEMOP_DEF(iemOp_seg_GS)
9633{
9634 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9635 IEMOP_HLP_MIN_386();
9636
9637 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9638 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9639
9640 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9641 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9642}
9643
9644
9645/** Opcode 0x66. */
9646FNIEMOP_DEF(iemOp_op_size)
9647{
9648 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9649 IEMOP_HLP_MIN_386();
9650
9651 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9652 iemRecalEffOpSize(pVCpu);
9653
9654 /* For the 4 entry opcode tables, the operand size prefix doesn't count
9655 when REPZ or REPNZ are present. */
9656 if (pVCpu->iem.s.idxPrefix == 0)
9657 pVCpu->iem.s.idxPrefix = 1;
9658
9659 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9660 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9661}
9662
9663
9664/** Opcode 0x67. */
9665FNIEMOP_DEF(iemOp_addr_size)
9666{
9667 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9668 IEMOP_HLP_MIN_386();
9669
9670 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
9671 switch (pVCpu->iem.s.enmDefAddrMode)
9672 {
9673 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9674 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9675 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9676 default: AssertFailed();
9677 }
9678
9679 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9680 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9681}
9682
9683
9684/** Opcode 0x68. */
9685FNIEMOP_DEF(iemOp_push_Iz)
9686{
9687 IEMOP_MNEMONIC(push_Iz, "push Iz");
9688 IEMOP_HLP_MIN_186();
9689 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9690 switch (pVCpu->iem.s.enmEffOpSize)
9691 {
9692 case IEMMODE_16BIT:
9693 {
9694 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9696 IEM_MC_BEGIN(0,0);
9697 IEM_MC_PUSH_U16(u16Imm);
9698 IEM_MC_ADVANCE_RIP();
9699 IEM_MC_END();
9700 return VINF_SUCCESS;
9701 }
9702
9703 case IEMMODE_32BIT:
9704 {
9705 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9707 IEM_MC_BEGIN(0,0);
9708 IEM_MC_PUSH_U32(u32Imm);
9709 IEM_MC_ADVANCE_RIP();
9710 IEM_MC_END();
9711 return VINF_SUCCESS;
9712 }
9713
9714 case IEMMODE_64BIT:
9715 {
9716 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9718 IEM_MC_BEGIN(0,0);
9719 IEM_MC_PUSH_U64(u64Imm);
9720 IEM_MC_ADVANCE_RIP();
9721 IEM_MC_END();
9722 return VINF_SUCCESS;
9723 }
9724
9725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9726 }
9727}
9728
9729
9730/** Opcode 0x69. */
9731FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9732{
9733 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9734 IEMOP_HLP_MIN_186();
9735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9736 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9737
9738 switch (pVCpu->iem.s.enmEffOpSize)
9739 {
9740 case IEMMODE_16BIT:
9741 {
9742 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9743 {
9744 /* register operand */
9745 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9747
9748 IEM_MC_BEGIN(3, 1);
9749 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9750 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9751 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9752 IEM_MC_LOCAL(uint16_t, u16Tmp);
9753
9754 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9755 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9756 IEM_MC_REF_EFLAGS(pEFlags);
9757 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9758 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9759
9760 IEM_MC_ADVANCE_RIP();
9761 IEM_MC_END();
9762 }
9763 else
9764 {
9765 /* memory operand */
9766 IEM_MC_BEGIN(3, 2);
9767 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9768 IEM_MC_ARG(uint16_t, u16Src, 1);
9769 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9770 IEM_MC_LOCAL(uint16_t, u16Tmp);
9771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9772
9773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9774 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9775 IEM_MC_ASSIGN(u16Src, u16Imm);
9776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9777 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9778 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9779 IEM_MC_REF_EFLAGS(pEFlags);
9780 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9781 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9782
9783 IEM_MC_ADVANCE_RIP();
9784 IEM_MC_END();
9785 }
9786 return VINF_SUCCESS;
9787 }
9788
9789 case IEMMODE_32BIT:
9790 {
9791 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9792 {
9793 /* register operand */
9794 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9796
9797 IEM_MC_BEGIN(3, 1);
9798 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9799 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9800 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9801 IEM_MC_LOCAL(uint32_t, u32Tmp);
9802
9803 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9804 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9805 IEM_MC_REF_EFLAGS(pEFlags);
9806 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9807 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9808
9809 IEM_MC_ADVANCE_RIP();
9810 IEM_MC_END();
9811 }
9812 else
9813 {
9814 /* memory operand */
9815 IEM_MC_BEGIN(3, 2);
9816 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9817 IEM_MC_ARG(uint32_t, u32Src, 1);
9818 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9819 IEM_MC_LOCAL(uint32_t, u32Tmp);
9820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9821
9822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9823 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9824 IEM_MC_ASSIGN(u32Src, u32Imm);
9825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9826 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9827 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9828 IEM_MC_REF_EFLAGS(pEFlags);
9829 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9830 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9831
9832 IEM_MC_ADVANCE_RIP();
9833 IEM_MC_END();
9834 }
9835 return VINF_SUCCESS;
9836 }
9837
9838 case IEMMODE_64BIT:
9839 {
9840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9841 {
9842 /* register operand */
9843 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9845
9846 IEM_MC_BEGIN(3, 1);
9847 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9848 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9849 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9850 IEM_MC_LOCAL(uint64_t, u64Tmp);
9851
9852 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9853 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9854 IEM_MC_REF_EFLAGS(pEFlags);
9855 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9856 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9857
9858 IEM_MC_ADVANCE_RIP();
9859 IEM_MC_END();
9860 }
9861 else
9862 {
9863 /* memory operand */
9864 IEM_MC_BEGIN(3, 2);
9865 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9866 IEM_MC_ARG(uint64_t, u64Src, 1);
9867 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9868 IEM_MC_LOCAL(uint64_t, u64Tmp);
9869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9870
9871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9872 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9873 IEM_MC_ASSIGN(u64Src, u64Imm);
9874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9875 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9876 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9877 IEM_MC_REF_EFLAGS(pEFlags);
9878 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9879 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9880
9881 IEM_MC_ADVANCE_RIP();
9882 IEM_MC_END();
9883 }
9884 return VINF_SUCCESS;
9885 }
9886 }
9887 AssertFailedReturn(VERR_IEM_IPE_9);
9888}
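
/*
 * Illustrative sketch (not part of the build): a rough model of the
 * iemAImpl_imul_two_u16 worker used above -- a truncating signed multiply
 * where CF and OF signal that the full product did not fit the destination
 * width.  SF/ZF/AF/PF are left undefined by the CPU, matching the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS statement; the helper name and the
 * combined CF/OF out-parameter are made up.
 */
#if 0
static void demoImulTwoU16(uint16_t *pu16Dst, uint16_t u16Src, unsigned *pfCfOf)
{
    int32_t const iFull = (int32_t)(int16_t)*pu16Dst * (int16_t)u16Src;
    *pu16Dst = (uint16_t)iFull;
    *pfCfOf  = iFull != (int16_t)iFull;         /* 1 when the result was truncated */
}
#endif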
9889
9890
9891/** Opcode 0x6a. */
9892FNIEMOP_DEF(iemOp_push_Ib)
9893{
9894 IEMOP_MNEMONIC(push_Ib, "push Ib");
9895 IEMOP_HLP_MIN_186();
9896 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9898 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9899
9900 IEM_MC_BEGIN(0,0);
9901 switch (pVCpu->iem.s.enmEffOpSize)
9902 {
9903 case IEMMODE_16BIT:
9904 IEM_MC_PUSH_U16(i8Imm);
9905 break;
9906 case IEMMODE_32BIT:
9907 IEM_MC_PUSH_U32(i8Imm);
9908 break;
9909 case IEMMODE_64BIT:
9910 IEM_MC_PUSH_U64(i8Imm);
9911 break;
9912 }
9913 IEM_MC_ADVANCE_RIP();
9914 IEM_MC_END();
9915 return VINF_SUCCESS;
9916}
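
/*
 * Illustrative sketch (not part of the build): the Ib immediate above is
 * fetched as a signed byte, so the IEM_MC_PUSH_U16/U32/U64 invocations
 * sign-extend it to the effective operand size through the usual C
 * conversions.  The helper name is made up.
 */
#if 0
static uint32_t demoPushIbValue32(int8_t i8Imm)
{
    return (uint32_t)(int32_t)i8Imm;            /* -128 pushes 0xffffff80 */
}
#endif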
9917
9918
9919/** Opcode 0x6b. */
9920FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9921{
9922 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9923 IEMOP_HLP_MIN_186();
9924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9925 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9926
9927 switch (pVCpu->iem.s.enmEffOpSize)
9928 {
9929 case IEMMODE_16BIT:
9930 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9931 {
9932 /* register operand */
9933 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9935
9936 IEM_MC_BEGIN(3, 1);
9937 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9938 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
9939 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9940 IEM_MC_LOCAL(uint16_t, u16Tmp);
9941
9942 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9943 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9944 IEM_MC_REF_EFLAGS(pEFlags);
9945 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9946 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9947
9948 IEM_MC_ADVANCE_RIP();
9949 IEM_MC_END();
9950 }
9951 else
9952 {
9953 /* memory operand */
9954 IEM_MC_BEGIN(3, 2);
9955 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9956 IEM_MC_ARG(uint16_t, u16Src, 1);
9957 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9958 IEM_MC_LOCAL(uint16_t, u16Tmp);
9959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9960
9961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9962 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9963 IEM_MC_ASSIGN(u16Src, u16Imm);
9964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9965 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9966 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9967 IEM_MC_REF_EFLAGS(pEFlags);
9968 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9969 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9970
9971 IEM_MC_ADVANCE_RIP();
9972 IEM_MC_END();
9973 }
9974 return VINF_SUCCESS;
9975
9976 case IEMMODE_32BIT:
9977 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9978 {
9979 /* register operand */
9980 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9982
9983 IEM_MC_BEGIN(3, 1);
9984 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9985 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
9986 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9987 IEM_MC_LOCAL(uint32_t, u32Tmp);
9988
9989 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9990 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9991 IEM_MC_REF_EFLAGS(pEFlags);
9992 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9993 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9994
9995 IEM_MC_ADVANCE_RIP();
9996 IEM_MC_END();
9997 }
9998 else
9999 {
10000 /* memory operand */
10001 IEM_MC_BEGIN(3, 2);
10002 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10003 IEM_MC_ARG(uint32_t, u32Src, 1);
10004 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10005 IEM_MC_LOCAL(uint32_t, u32Tmp);
10006 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10007
10008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10009 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
10010 IEM_MC_ASSIGN(u32Src, u32Imm);
10011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10012 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10013 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10014 IEM_MC_REF_EFLAGS(pEFlags);
10015 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10016 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10017
10018 IEM_MC_ADVANCE_RIP();
10019 IEM_MC_END();
10020 }
10021 return VINF_SUCCESS;
10022
10023 case IEMMODE_64BIT:
10024 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10025 {
10026 /* register operand */
10027 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10029
10030 IEM_MC_BEGIN(3, 1);
10031 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10032 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
10033 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10034 IEM_MC_LOCAL(uint64_t, u64Tmp);
10035
10036 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10037 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10038 IEM_MC_REF_EFLAGS(pEFlags);
10039 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10040 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10041
10042 IEM_MC_ADVANCE_RIP();
10043 IEM_MC_END();
10044 }
10045 else
10046 {
10047 /* memory operand */
10048 IEM_MC_BEGIN(3, 2);
10049 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10050 IEM_MC_ARG(uint64_t, u64Src, 1);
10051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10052 IEM_MC_LOCAL(uint64_t, u64Tmp);
10053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10054
10055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10056 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
10057 IEM_MC_ASSIGN(u64Src, u64Imm);
10058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10059 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10060 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10061 IEM_MC_REF_EFLAGS(pEFlags);
10062 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10063 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10064
10065 IEM_MC_ADVANCE_RIP();
10066 IEM_MC_END();
10067 }
10068 return VINF_SUCCESS;
10069 }
10070 AssertFailedReturn(VERR_IEM_IPE_8);
10071}
10072
10073
10074/** Opcode 0x6c. */
10075FNIEMOP_DEF(iemOp_insb_Yb_DX)
10076{
10077 IEMOP_HLP_MIN_186();
10078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10079 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10080 {
10081 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10082 switch (pVCpu->iem.s.enmEffAddrMode)
10083 {
10084 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10085 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10086 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10088 }
10089 }
10090 else
10091 {
10092 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10093 switch (pVCpu->iem.s.enmEffAddrMode)
10094 {
10095 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10096 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10097 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10099 }
10100 }
10101}
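
/*
 * Illustrative sketch (not part of the build): a rough model of the
 * REP INS byte variants deferred to above -- an I/O port read stored at
 * ES:xDI, repeated xCX times, stepping xDI by the element size in the
 * direction given by EFLAGS.DF.  Permission checks, faults and address
 * wrap-around are omitted; the helper and its callback are made up.
 */
#if 0
static void demoRepIns8(uint16_t uPort, uint8_t *pbEsBase, uint64_t *puDi,
                        uint64_t *puCx, unsigned fDf, uint8_t (*pfnInU8)(uint16_t))
{
    while (*puCx != 0)
    {
        pbEsBase[*puDi] = pfnInU8(uPort);
        *puDi += fDf ? (uint64_t)-1 : 1;
        *puCx -= 1;
    }
}
#endif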
10102
10103
10104/** Opcode 0x6d. */
10105FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10106{
10107 IEMOP_HLP_MIN_186();
10108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10109 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10110 {
10111 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10112 switch (pVCpu->iem.s.enmEffOpSize)
10113 {
10114 case IEMMODE_16BIT:
10115 switch (pVCpu->iem.s.enmEffAddrMode)
10116 {
10117 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10118 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10119 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10120 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10121 }
10122 break;
10123 case IEMMODE_64BIT:
10124 case IEMMODE_32BIT:
10125 switch (pVCpu->iem.s.enmEffAddrMode)
10126 {
10127 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10128 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10129 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10131 }
10132 break;
10133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10134 }
10135 }
10136 else
10137 {
10138 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
10139 switch (pVCpu->iem.s.enmEffOpSize)
10140 {
10141 case IEMMODE_16BIT:
10142 switch (pVCpu->iem.s.enmEffAddrMode)
10143 {
10144 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
10145 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
10146 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
10147 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10148 }
10149 break;
10150 case IEMMODE_64BIT:
10151 case IEMMODE_32BIT:
10152 switch (pVCpu->iem.s.enmEffAddrMode)
10153 {
10154 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
10155 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
10156 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
10157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10158 }
10159 break;
10160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10161 }
10162 }
10163}
10164
10165
10166/** Opcode 0x6e. */
10167FNIEMOP_DEF(iemOp_outsb_Yb_DX)
10168{
10169 IEMOP_HLP_MIN_186();
10170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10171 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
10172 {
10173 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
10174 switch (pVCpu->iem.s.enmEffAddrMode)
10175 {
10176 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10177 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10178 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10179 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10180 }
10181 }
10182 else
10183 {
10184 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
10185 switch (pVCpu->iem.s.enmEffAddrMode)
10186 {
10187 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10188 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10189 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10191 }
10192 }
10193}
10194
10195
10196/** Opcode 0x6f. */
10197FNIEMOP_DEF(iemOp_outswd_Yv_DX)
10198{
10199 IEMOP_HLP_MIN_186();
10200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10201 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
10202 {
10203 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
10204 switch (pVCpu->iem.s.enmEffOpSize)
10205 {
10206 case IEMMODE_16BIT:
10207 switch (pVCpu->iem.s.enmEffAddrMode)
10208 {
10209 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10210 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10211 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10213 }
10214 break;
10215 case IEMMODE_64BIT:
10216 case IEMMODE_32BIT:
10217 switch (pVCpu->iem.s.enmEffAddrMode)
10218 {
10219 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10220 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10221 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10223 }
10224 break;
10225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10226 }
10227 }
10228 else
10229 {
10230 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
10231 switch (pVCpu->iem.s.enmEffOpSize)
10232 {
10233 case IEMMODE_16BIT:
10234 switch (pVCpu->iem.s.enmEffAddrMode)
10235 {
10236 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10237 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10238 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10240 }
10241 break;
10242 case IEMMODE_64BIT:
10243 case IEMMODE_32BIT:
10244 switch (pVCpu->iem.s.enmEffAddrMode)
10245 {
10246 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10247 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10248 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10250 }
10251 break;
10252 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10253 }
10254 }
10255}
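/* Note! The INS/OUTS handlers above all defer to C workers selected by a
   (REP prefix) x (operand size) x (address size) matrix. The operand size
   picks the port access width - there is no 64-bit port I/O, which is why
   IEMMODE_64BIT shares the 32-bit workers - while the address size only
   affects how rSI/rDI are advanced and wrapped. Illustrative guest bytes
   (not from this file): in a 32-bit code segment, 66 F3 6D ('rep insw')
   ends up in iemCImpl_rep_ins_op16_addr32. */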
10256
10257
10258/** Opcode 0x70. */
10259FNIEMOP_DEF(iemOp_jo_Jb)
10260{
10261 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
10262 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10264 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10265
10266 IEM_MC_BEGIN(0, 0);
10267 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10268 IEM_MC_REL_JMP_S8(i8Imm);
10269 } IEM_MC_ELSE() {
10270 IEM_MC_ADVANCE_RIP();
10271 } IEM_MC_ENDIF();
10272 IEM_MC_END();
10273 return VINF_SUCCESS;
10274}
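/* Note! The remaining Jcc Jb handlers below all follow the template above:
   fetch the signed 8-bit displacement, test the relevant EFLAGS bit(s), and
   either take the relative jump or just advance RIP past the instruction.
   The negated forms simply swap the two branches of the
   IEM_MC_IF/IEM_MC_ELSE. */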
10275
10276
10277/** Opcode 0x71. */
10278FNIEMOP_DEF(iemOp_jno_Jb)
10279{
10280 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
10281 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10283 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10284
10285 IEM_MC_BEGIN(0, 0);
10286 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10287 IEM_MC_ADVANCE_RIP();
10288 } IEM_MC_ELSE() {
10289 IEM_MC_REL_JMP_S8(i8Imm);
10290 } IEM_MC_ENDIF();
10291 IEM_MC_END();
10292 return VINF_SUCCESS;
10293}
10294
10295/** Opcode 0x72. */
10296FNIEMOP_DEF(iemOp_jc_Jb)
10297{
10298 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
10299 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10301 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10302
10303 IEM_MC_BEGIN(0, 0);
10304 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10305 IEM_MC_REL_JMP_S8(i8Imm);
10306 } IEM_MC_ELSE() {
10307 IEM_MC_ADVANCE_RIP();
10308 } IEM_MC_ENDIF();
10309 IEM_MC_END();
10310 return VINF_SUCCESS;
10311}
10312
10313
10314/** Opcode 0x73. */
10315FNIEMOP_DEF(iemOp_jnc_Jb)
10316{
10317 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
10318 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10320 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10321
10322 IEM_MC_BEGIN(0, 0);
10323 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10324 IEM_MC_ADVANCE_RIP();
10325 } IEM_MC_ELSE() {
10326 IEM_MC_REL_JMP_S8(i8Imm);
10327 } IEM_MC_ENDIF();
10328 IEM_MC_END();
10329 return VINF_SUCCESS;
10330}
10331
10332
10333/** Opcode 0x74. */
10334FNIEMOP_DEF(iemOp_je_Jb)
10335{
10336 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
10337 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10339 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10340
10341 IEM_MC_BEGIN(0, 0);
10342 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10343 IEM_MC_REL_JMP_S8(i8Imm);
10344 } IEM_MC_ELSE() {
10345 IEM_MC_ADVANCE_RIP();
10346 } IEM_MC_ENDIF();
10347 IEM_MC_END();
10348 return VINF_SUCCESS;
10349}
10350
10351
10352/** Opcode 0x75. */
10353FNIEMOP_DEF(iemOp_jne_Jb)
10354{
10355 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
10356 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10357 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10358 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10359
10360 IEM_MC_BEGIN(0, 0);
10361 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10362 IEM_MC_ADVANCE_RIP();
10363 } IEM_MC_ELSE() {
10364 IEM_MC_REL_JMP_S8(i8Imm);
10365 } IEM_MC_ENDIF();
10366 IEM_MC_END();
10367 return VINF_SUCCESS;
10368}
10369
10370
10371/** Opcode 0x76. */
10372FNIEMOP_DEF(iemOp_jbe_Jb)
10373{
10374 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
10375 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10377 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10378
10379 IEM_MC_BEGIN(0, 0);
10380 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10381 IEM_MC_REL_JMP_S8(i8Imm);
10382 } IEM_MC_ELSE() {
10383 IEM_MC_ADVANCE_RIP();
10384 } IEM_MC_ENDIF();
10385 IEM_MC_END();
10386 return VINF_SUCCESS;
10387}
10388
10389
10390/** Opcode 0x77. */
10391FNIEMOP_DEF(iemOp_jnbe_Jb)
10392{
10393 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
10394 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10396 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10397
10398 IEM_MC_BEGIN(0, 0);
10399 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10400 IEM_MC_ADVANCE_RIP();
10401 } IEM_MC_ELSE() {
10402 IEM_MC_REL_JMP_S8(i8Imm);
10403 } IEM_MC_ENDIF();
10404 IEM_MC_END();
10405 return VINF_SUCCESS;
10406}
10407
10408
10409/** Opcode 0x78. */
10410FNIEMOP_DEF(iemOp_js_Jb)
10411{
10412 IEMOP_MNEMONIC(js_Jb, "js Jb");
10413 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10415 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10416
10417 IEM_MC_BEGIN(0, 0);
10418 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10419 IEM_MC_REL_JMP_S8(i8Imm);
10420 } IEM_MC_ELSE() {
10421 IEM_MC_ADVANCE_RIP();
10422 } IEM_MC_ENDIF();
10423 IEM_MC_END();
10424 return VINF_SUCCESS;
10425}
10426
10427
10428/** Opcode 0x79. */
10429FNIEMOP_DEF(iemOp_jns_Jb)
10430{
10431 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
10432 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10434 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10435
10436 IEM_MC_BEGIN(0, 0);
10437 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10438 IEM_MC_ADVANCE_RIP();
10439 } IEM_MC_ELSE() {
10440 IEM_MC_REL_JMP_S8(i8Imm);
10441 } IEM_MC_ENDIF();
10442 IEM_MC_END();
10443 return VINF_SUCCESS;
10444}
10445
10446
10447/** Opcode 0x7a. */
10448FNIEMOP_DEF(iemOp_jp_Jb)
10449{
10450 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10451 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10453 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10454
10455 IEM_MC_BEGIN(0, 0);
10456 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10457 IEM_MC_REL_JMP_S8(i8Imm);
10458 } IEM_MC_ELSE() {
10459 IEM_MC_ADVANCE_RIP();
10460 } IEM_MC_ENDIF();
10461 IEM_MC_END();
10462 return VINF_SUCCESS;
10463}
10464
10465
10466/** Opcode 0x7b. */
10467FNIEMOP_DEF(iemOp_jnp_Jb)
10468{
10469 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10470 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10472 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10473
10474 IEM_MC_BEGIN(0, 0);
10475 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10476 IEM_MC_ADVANCE_RIP();
10477 } IEM_MC_ELSE() {
10478 IEM_MC_REL_JMP_S8(i8Imm);
10479 } IEM_MC_ENDIF();
10480 IEM_MC_END();
10481 return VINF_SUCCESS;
10482}
10483
10484
10485/** Opcode 0x7c. */
10486FNIEMOP_DEF(iemOp_jl_Jb)
10487{
10488 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
10489 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10491 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10492
10493 IEM_MC_BEGIN(0, 0);
10494 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10495 IEM_MC_REL_JMP_S8(i8Imm);
10496 } IEM_MC_ELSE() {
10497 IEM_MC_ADVANCE_RIP();
10498 } IEM_MC_ENDIF();
10499 IEM_MC_END();
10500 return VINF_SUCCESS;
10501}
10502
10503
10504/** Opcode 0x7d. */
10505FNIEMOP_DEF(iemOp_jnl_Jb)
10506{
10507 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
10508 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10510 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10511
10512 IEM_MC_BEGIN(0, 0);
10513 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10514 IEM_MC_ADVANCE_RIP();
10515 } IEM_MC_ELSE() {
10516 IEM_MC_REL_JMP_S8(i8Imm);
10517 } IEM_MC_ENDIF();
10518 IEM_MC_END();
10519 return VINF_SUCCESS;
10520}
10521
10522
10523/** Opcode 0x7e. */
10524FNIEMOP_DEF(iemOp_jle_Jb)
10525{
10526 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
10527 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10529 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10530
10531 IEM_MC_BEGIN(0, 0);
10532 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10533 IEM_MC_REL_JMP_S8(i8Imm);
10534 } IEM_MC_ELSE() {
10535 IEM_MC_ADVANCE_RIP();
10536 } IEM_MC_ENDIF();
10537 IEM_MC_END();
10538 return VINF_SUCCESS;
10539}
10540
10541
10542/** Opcode 0x7f. */
10543FNIEMOP_DEF(iemOp_jnle_Jb)
10544{
10545 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
10546 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10548 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10549
10550 IEM_MC_BEGIN(0, 0);
10551 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10552 IEM_MC_ADVANCE_RIP();
10553 } IEM_MC_ELSE() {
10554 IEM_MC_REL_JMP_S8(i8Imm);
10555 } IEM_MC_ENDIF();
10556 IEM_MC_END();
10557 return VINF_SUCCESS;
10558}
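/* Note! The signed conditions above match the SDM definitions:
        jl/jnge:  SF != OF
        jle/jng:  ZF == 1 || SF != OF
   which is exactly what IEM_MC_IF_EFL_BITS_NE and
   IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE test, with jnl/jnle using the inverted
   branch order. */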
10559
10560
10561/** Opcode 0x80. */
10562FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
10563{
10564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10565 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10566 {
10567 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
10568 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
10569 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
10570 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
10571 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
10572 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
10573 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
10574 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
10575 }
10576 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10577
10578 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10579 {
10580 /* register target */
10581 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10583 IEM_MC_BEGIN(3, 0);
10584 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10585 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10586 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10587
10588 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10589 IEM_MC_REF_EFLAGS(pEFlags);
10590 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10591
10592 IEM_MC_ADVANCE_RIP();
10593 IEM_MC_END();
10594 }
10595 else
10596 {
10597 /* memory target */
10598 uint32_t fAccess;
10599 if (pImpl->pfnLockedU8)
10600 fAccess = IEM_ACCESS_DATA_RW;
10601 else /* CMP */
10602 fAccess = IEM_ACCESS_DATA_R;
10603 IEM_MC_BEGIN(3, 2);
10604 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10605 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10607
10608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10609 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10610 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
10611 if (pImpl->pfnLockedU8)
10612 IEMOP_HLP_DONE_DECODING();
10613 else
10614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10615
10616 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10617 IEM_MC_FETCH_EFLAGS(EFlags);
10618 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10619 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
10620 else
10621 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
10622
10623 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
10624 IEM_MC_COMMIT_EFLAGS(EFlags);
10625 IEM_MC_ADVANCE_RIP();
10626 IEM_MC_END();
10627 }
10628 return VINF_SUCCESS;
10629}
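/* Note! For the group 1 opcodes (0x80..0x83) the ModRM reg field selects the
   operation instead of a register and is used to index g_apIemImplGrp1
   above. CMP (/7) only reads its destination, so its entry has no locked
   worker and the memory path maps the operand read-only
   (IEM_ACCESS_DATA_R). */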
10630
10631
10632/** Opcode 0x81. */
10633FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
10634{
10635 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10636 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10637 {
10638 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
10639 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
10640 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
10641 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
10642 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
10643 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
10644 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
10645 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
10646 }
10647 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10648
10649 switch (pVCpu->iem.s.enmEffOpSize)
10650 {
10651 case IEMMODE_16BIT:
10652 {
10653 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10654 {
10655 /* register target */
10656 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10658 IEM_MC_BEGIN(3, 0);
10659 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10660 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
10661 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10662
10663 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10664 IEM_MC_REF_EFLAGS(pEFlags);
10665 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10666
10667 IEM_MC_ADVANCE_RIP();
10668 IEM_MC_END();
10669 }
10670 else
10671 {
10672 /* memory target */
10673 uint32_t fAccess;
10674 if (pImpl->pfnLockedU16)
10675 fAccess = IEM_ACCESS_DATA_RW;
10676 else /* CMP, TEST */
10677 fAccess = IEM_ACCESS_DATA_R;
10678 IEM_MC_BEGIN(3, 2);
10679 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10680 IEM_MC_ARG(uint16_t, u16Src, 1);
10681 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10683
10684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10685 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10686 IEM_MC_ASSIGN(u16Src, u16Imm);
10687 if (pImpl->pfnLockedU16)
10688 IEMOP_HLP_DONE_DECODING();
10689 else
10690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10691 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10692 IEM_MC_FETCH_EFLAGS(EFlags);
10693 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10694 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10695 else
10696 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10697
10698 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10699 IEM_MC_COMMIT_EFLAGS(EFlags);
10700 IEM_MC_ADVANCE_RIP();
10701 IEM_MC_END();
10702 }
10703 break;
10704 }
10705
10706 case IEMMODE_32BIT:
10707 {
10708 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10709 {
10710 /* register target */
10711 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10713 IEM_MC_BEGIN(3, 0);
10714 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10715 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10716 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10717
10718 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10719 IEM_MC_REF_EFLAGS(pEFlags);
10720 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10721 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10722
10723 IEM_MC_ADVANCE_RIP();
10724 IEM_MC_END();
10725 }
10726 else
10727 {
10728 /* memory target */
10729 uint32_t fAccess;
10730 if (pImpl->pfnLockedU32)
10731 fAccess = IEM_ACCESS_DATA_RW;
10732 else /* CMP, TEST */
10733 fAccess = IEM_ACCESS_DATA_R;
10734 IEM_MC_BEGIN(3, 2);
10735 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10736 IEM_MC_ARG(uint32_t, u32Src, 1);
10737 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10739
10740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10741 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10742 IEM_MC_ASSIGN(u32Src, u32Imm);
10743 if (pImpl->pfnLockedU32)
10744 IEMOP_HLP_DONE_DECODING();
10745 else
10746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10747 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10748 IEM_MC_FETCH_EFLAGS(EFlags);
10749 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10750 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10751 else
10752 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10753
10754 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10755 IEM_MC_COMMIT_EFLAGS(EFlags);
10756 IEM_MC_ADVANCE_RIP();
10757 IEM_MC_END();
10758 }
10759 break;
10760 }
10761
10762 case IEMMODE_64BIT:
10763 {
10764 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10765 {
10766 /* register target */
10767 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10769 IEM_MC_BEGIN(3, 0);
10770 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10771 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10772 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10773
10774 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10775 IEM_MC_REF_EFLAGS(pEFlags);
10776 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10777
10778 IEM_MC_ADVANCE_RIP();
10779 IEM_MC_END();
10780 }
10781 else
10782 {
10783 /* memory target */
10784 uint32_t fAccess;
10785 if (pImpl->pfnLockedU64)
10786 fAccess = IEM_ACCESS_DATA_RW;
10787 else /* CMP */
10788 fAccess = IEM_ACCESS_DATA_R;
10789 IEM_MC_BEGIN(3, 2);
10790 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10791 IEM_MC_ARG(uint64_t, u64Src, 1);
10792 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10794
10795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10796 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10797 if (pImpl->pfnLockedU64)
10798 IEMOP_HLP_DONE_DECODING();
10799 else
10800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10801 IEM_MC_ASSIGN(u64Src, u64Imm);
10802 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10803 IEM_MC_FETCH_EFLAGS(EFlags);
10804 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10805 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10806 else
10807 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10808
10809 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10810 IEM_MC_COMMIT_EFLAGS(EFlags);
10811 IEM_MC_ADVANCE_RIP();
10812 IEM_MC_END();
10813 }
10814 break;
10815 }
10816 }
10817 return VINF_SUCCESS;
10818}
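/* Note! There is no 64-bit immediate form of these instructions; in 64-bit
   operand mode Iz stays 32 bits and is sign-extended, hence the
   IEM_OPCODE_GET_NEXT_S32_SX_U64 fetches above. */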
10819
10820
10821/** Opcode 0x82. */
10822FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10823{
10824 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10825 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10826}
10827
10828
10829/** Opcode 0x83. */
10830FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10831{
10832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10833 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10834 {
10835 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10836 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10837 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10838 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10839 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10840 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10841 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10842 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10843 }
10856 10844 /* Note! The OR, AND and XOR forms seem to be present on CPUs prior to
10857 10845 the 386 even though they are absent from the Intel reference manuals and
10858 10846 some 3rd party opcode listings. */
10847 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
10848
10849 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10850 {
10851 /*
10852 * Register target
10853 */
10854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10855 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10856 switch (pVCpu->iem.s.enmEffOpSize)
10857 {
10858 case IEMMODE_16BIT:
10859 {
10860 IEM_MC_BEGIN(3, 0);
10861 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10862 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
10863 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10864
10865 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10866 IEM_MC_REF_EFLAGS(pEFlags);
10867 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10868
10869 IEM_MC_ADVANCE_RIP();
10870 IEM_MC_END();
10871 break;
10872 }
10873
10874 case IEMMODE_32BIT:
10875 {
10876 IEM_MC_BEGIN(3, 0);
10877 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10878 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
10879 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10880
10881 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10882 IEM_MC_REF_EFLAGS(pEFlags);
10883 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10884 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10885
10886 IEM_MC_ADVANCE_RIP();
10887 IEM_MC_END();
10888 break;
10889 }
10890
10891 case IEMMODE_64BIT:
10892 {
10893 IEM_MC_BEGIN(3, 0);
10894 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10895 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
10896 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10897
10898 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10899 IEM_MC_REF_EFLAGS(pEFlags);
10900 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10901
10902 IEM_MC_ADVANCE_RIP();
10903 IEM_MC_END();
10904 break;
10905 }
10906 }
10907 }
10908 else
10909 {
10910 /*
10911 * Memory target.
10912 */
10913 uint32_t fAccess;
10914 if (pImpl->pfnLockedU16)
10915 fAccess = IEM_ACCESS_DATA_RW;
10916 else /* CMP */
10917 fAccess = IEM_ACCESS_DATA_R;
10918
10919 switch (pVCpu->iem.s.enmEffOpSize)
10920 {
10921 case IEMMODE_16BIT:
10922 {
10923 IEM_MC_BEGIN(3, 2);
10924 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10925 IEM_MC_ARG(uint16_t, u16Src, 1);
10926 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10928
10929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10930 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10931 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
10932 if (pImpl->pfnLockedU16)
10933 IEMOP_HLP_DONE_DECODING();
10934 else
10935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10936 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10937 IEM_MC_FETCH_EFLAGS(EFlags);
10938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10940 else
10941 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10942
10943 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10944 IEM_MC_COMMIT_EFLAGS(EFlags);
10945 IEM_MC_ADVANCE_RIP();
10946 IEM_MC_END();
10947 break;
10948 }
10949
10950 case IEMMODE_32BIT:
10951 {
10952 IEM_MC_BEGIN(3, 2);
10953 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10954 IEM_MC_ARG(uint32_t, u32Src, 1);
10955 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10957
10958 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10959 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10960 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
10961 if (pImpl->pfnLockedU32)
10962 IEMOP_HLP_DONE_DECODING();
10963 else
10964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10965 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10966 IEM_MC_FETCH_EFLAGS(EFlags);
10967 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10968 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10969 else
10970 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10971
10972 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10973 IEM_MC_COMMIT_EFLAGS(EFlags);
10974 IEM_MC_ADVANCE_RIP();
10975 IEM_MC_END();
10976 break;
10977 }
10978
10979 case IEMMODE_64BIT:
10980 {
10981 IEM_MC_BEGIN(3, 2);
10982 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10983 IEM_MC_ARG(uint64_t, u64Src, 1);
10984 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10985 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10986
10987 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10988 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10989 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
10990 if (pImpl->pfnLockedU64)
10991 IEMOP_HLP_DONE_DECODING();
10992 else
10993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10994 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10995 IEM_MC_FETCH_EFLAGS(EFlags);
10996 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10997 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10998 else
10999 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
11000
11001 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
11002 IEM_MC_COMMIT_EFLAGS(EFlags);
11003 IEM_MC_ADVANCE_RIP();
11004 IEM_MC_END();
11005 break;
11006 }
11007 }
11008 }
11009 return VINF_SUCCESS;
11010}
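/* Note! Opcode 0x83 always takes a sign-extended 8-bit immediate, whatever
   the operand size - that is what all the (int8_t)u8Imm casts above are for.
   Illustrative guest bytes (not from this file):
        83 C0 FF    add eax, 0ffffffffh     ; i.e. add eax, -1 */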
11011
11012
11013/** Opcode 0x84. */
11014FNIEMOP_DEF(iemOp_test_Eb_Gb)
11015{
11016 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
11017 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11018 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
11019}
11020
11021
11022/** Opcode 0x85. */
11023FNIEMOP_DEF(iemOp_test_Ev_Gv)
11024{
11025 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
11026 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11027 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
11028}
11029
11030
11031/** Opcode 0x86. */
11032FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
11033{
11034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11035 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
11036
11037 /*
11038 * If rm is denoting a register, no more instruction bytes.
11039 */
11040 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11041 {
11042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11043
11044 IEM_MC_BEGIN(0, 2);
11045 IEM_MC_LOCAL(uint8_t, uTmp1);
11046 IEM_MC_LOCAL(uint8_t, uTmp2);
11047
11048 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11049 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11050 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11051 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11052
11053 IEM_MC_ADVANCE_RIP();
11054 IEM_MC_END();
11055 }
11056 else
11057 {
11058 /*
11059 * We're accessing memory.
11060 */
11061/** @todo the register must be committed separately! */
11062 IEM_MC_BEGIN(2, 2);
11063 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
11064 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11066
11067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11068 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11069 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11070 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
11071 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
11072
11073 IEM_MC_ADVANCE_RIP();
11074 IEM_MC_END();
11075 }
11076 return VINF_SUCCESS;
11077}
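/* Note! XCHG with a memory operand is architecturally locked whether or not
   a LOCK prefix is present, which is presumably why the memory path above
   maps the operand read-write without checking IEM_OP_PRF_LOCK first. */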
11078
11079
11080/** Opcode 0x87. */
11081FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
11082{
11083 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
11084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11085
11086 /*
11087 * If rm is denoting a register, no more instruction bytes.
11088 */
11089 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11090 {
11091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11092
11093 switch (pVCpu->iem.s.enmEffOpSize)
11094 {
11095 case IEMMODE_16BIT:
11096 IEM_MC_BEGIN(0, 2);
11097 IEM_MC_LOCAL(uint16_t, uTmp1);
11098 IEM_MC_LOCAL(uint16_t, uTmp2);
11099
11100 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11101 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11102 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11103 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11104
11105 IEM_MC_ADVANCE_RIP();
11106 IEM_MC_END();
11107 return VINF_SUCCESS;
11108
11109 case IEMMODE_32BIT:
11110 IEM_MC_BEGIN(0, 2);
11111 IEM_MC_LOCAL(uint32_t, uTmp1);
11112 IEM_MC_LOCAL(uint32_t, uTmp2);
11113
11114 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11115 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11116 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11117 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11118
11119 IEM_MC_ADVANCE_RIP();
11120 IEM_MC_END();
11121 return VINF_SUCCESS;
11122
11123 case IEMMODE_64BIT:
11124 IEM_MC_BEGIN(0, 2);
11125 IEM_MC_LOCAL(uint64_t, uTmp1);
11126 IEM_MC_LOCAL(uint64_t, uTmp2);
11127
11128 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11129 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
11131 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
11132
11133 IEM_MC_ADVANCE_RIP();
11134 IEM_MC_END();
11135 return VINF_SUCCESS;
11136
11137 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11138 }
11139 }
11140 else
11141 {
11142 /*
11143 * We're accessing memory.
11144 */
11145 switch (pVCpu->iem.s.enmEffOpSize)
11146 {
11147/** @todo the register must be committed separately! */
11148 case IEMMODE_16BIT:
11149 IEM_MC_BEGIN(2, 2);
11150 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
11151 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11153
11154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11155 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11156 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11157 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
11158 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
11159
11160 IEM_MC_ADVANCE_RIP();
11161 IEM_MC_END();
11162 return VINF_SUCCESS;
11163
11164 case IEMMODE_32BIT:
11165 IEM_MC_BEGIN(2, 2);
11166 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
11167 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11169
11170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11171 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11172 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11173 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
11174 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
11175
11176 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11177 IEM_MC_ADVANCE_RIP();
11178 IEM_MC_END();
11179 return VINF_SUCCESS;
11180
11181 case IEMMODE_64BIT:
11182 IEM_MC_BEGIN(2, 2);
11183 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
11184 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11186
11187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11188 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11189 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11190 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
11191 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
11192
11193 IEM_MC_ADVANCE_RIP();
11194 IEM_MC_END();
11195 return VINF_SUCCESS;
11196
11197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11198 }
11199 }
11200}
11201
11202
11203/** Opcode 0x88. */
11204FNIEMOP_DEF(iemOp_mov_Eb_Gb)
11205{
11206 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
11207
11208 uint8_t bRm;
11209 IEM_OPCODE_GET_NEXT_U8(&bRm);
11210
11211 /*
11212 * If rm is denoting a register, no more instruction bytes.
11213 */
11214 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11215 {
11216 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11217 IEM_MC_BEGIN(0, 1);
11218 IEM_MC_LOCAL(uint8_t, u8Value);
11219 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11220 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
11221 IEM_MC_ADVANCE_RIP();
11222 IEM_MC_END();
11223 }
11224 else
11225 {
11226 /*
11227 * We're writing a register to memory.
11228 */
11229 IEM_MC_BEGIN(0, 2);
11230 IEM_MC_LOCAL(uint8_t, u8Value);
11231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11234 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11235 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
11236 IEM_MC_ADVANCE_RIP();
11237 IEM_MC_END();
11238 }
11239 return VINF_SUCCESS;
11241}
11242
11243
11244/** Opcode 0x89. */
11245FNIEMOP_DEF(iemOp_mov_Ev_Gv)
11246{
11247 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
11248
11249 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11250
11251 /*
11252 * If rm is denoting a register, no more instruction bytes.
11253 */
11254 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11255 {
11256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11257 switch (pVCpu->iem.s.enmEffOpSize)
11258 {
11259 case IEMMODE_16BIT:
11260 IEM_MC_BEGIN(0, 1);
11261 IEM_MC_LOCAL(uint16_t, u16Value);
11262 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11263 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11264 IEM_MC_ADVANCE_RIP();
11265 IEM_MC_END();
11266 break;
11267
11268 case IEMMODE_32BIT:
11269 IEM_MC_BEGIN(0, 1);
11270 IEM_MC_LOCAL(uint32_t, u32Value);
11271 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11272 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11273 IEM_MC_ADVANCE_RIP();
11274 IEM_MC_END();
11275 break;
11276
11277 case IEMMODE_64BIT:
11278 IEM_MC_BEGIN(0, 1);
11279 IEM_MC_LOCAL(uint64_t, u64Value);
11280 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11281 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11282 IEM_MC_ADVANCE_RIP();
11283 IEM_MC_END();
11284 break;
11285 }
11286 }
11287 else
11288 {
11289 /*
11290 * We're writing a register to memory.
11291 */
11292 switch (pVCpu->iem.s.enmEffOpSize)
11293 {
11294 case IEMMODE_16BIT:
11295 IEM_MC_BEGIN(0, 2);
11296 IEM_MC_LOCAL(uint16_t, u16Value);
11297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11298 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11300 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11301 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11302 IEM_MC_ADVANCE_RIP();
11303 IEM_MC_END();
11304 break;
11305
11306 case IEMMODE_32BIT:
11307 IEM_MC_BEGIN(0, 2);
11308 IEM_MC_LOCAL(uint32_t, u32Value);
11309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11312 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11313 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
11314 IEM_MC_ADVANCE_RIP();
11315 IEM_MC_END();
11316 break;
11317
11318 case IEMMODE_64BIT:
11319 IEM_MC_BEGIN(0, 2);
11320 IEM_MC_LOCAL(uint64_t, u64Value);
11321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11324 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
11325 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
11326 IEM_MC_ADVANCE_RIP();
11327 IEM_MC_END();
11328 break;
11329 }
11330 }
11331 return VINF_SUCCESS;
11332}
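/* Note! No explicit IEM_MC_CLEAR_HIGH_GREG_U64 is needed in the 32-bit
   register case above; IEM_MC_STORE_GREG_U32 is expected to zero-extend into
   the full 64-bit register, matching the architectural behaviour of 32-bit
   destination writes in long mode. */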
11333
11334
11335/** Opcode 0x8a. */
11336FNIEMOP_DEF(iemOp_mov_Gb_Eb)
11337{
11338 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
11339
11340 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11341
11342 /*
11343 * If rm is denoting a register, no more instruction bytes.
11344 */
11345 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11346 {
11347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11348 IEM_MC_BEGIN(0, 1);
11349 IEM_MC_LOCAL(uint8_t, u8Value);
11350 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11351 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11352 IEM_MC_ADVANCE_RIP();
11353 IEM_MC_END();
11354 }
11355 else
11356 {
11357 /*
11358 * We're loading a register from memory.
11359 */
11360 IEM_MC_BEGIN(0, 2);
11361 IEM_MC_LOCAL(uint8_t, u8Value);
11362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11365 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11366 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
11367 IEM_MC_ADVANCE_RIP();
11368 IEM_MC_END();
11369 }
11370 return VINF_SUCCESS;
11371}
11372
11373
11374/** Opcode 0x8b. */
11375FNIEMOP_DEF(iemOp_mov_Gv_Ev)
11376{
11377 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
11378
11379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11380
11381 /*
11382 * If rm is denoting a register, no more instruction bytes.
11383 */
11384 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11385 {
11386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11387 switch (pVCpu->iem.s.enmEffOpSize)
11388 {
11389 case IEMMODE_16BIT:
11390 IEM_MC_BEGIN(0, 1);
11391 IEM_MC_LOCAL(uint16_t, u16Value);
11392 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11393 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11394 IEM_MC_ADVANCE_RIP();
11395 IEM_MC_END();
11396 break;
11397
11398 case IEMMODE_32BIT:
11399 IEM_MC_BEGIN(0, 1);
11400 IEM_MC_LOCAL(uint32_t, u32Value);
11401 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11402 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11403 IEM_MC_ADVANCE_RIP();
11404 IEM_MC_END();
11405 break;
11406
11407 case IEMMODE_64BIT:
11408 IEM_MC_BEGIN(0, 1);
11409 IEM_MC_LOCAL(uint64_t, u64Value);
11410 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11411 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11412 IEM_MC_ADVANCE_RIP();
11413 IEM_MC_END();
11414 break;
11415 }
11416 }
11417 else
11418 {
11419 /*
11420 * We're loading a register from memory.
11421 */
11422 switch (pVCpu->iem.s.enmEffOpSize)
11423 {
11424 case IEMMODE_16BIT:
11425 IEM_MC_BEGIN(0, 2);
11426 IEM_MC_LOCAL(uint16_t, u16Value);
11427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11430 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11431 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
11432 IEM_MC_ADVANCE_RIP();
11433 IEM_MC_END();
11434 break;
11435
11436 case IEMMODE_32BIT:
11437 IEM_MC_BEGIN(0, 2);
11438 IEM_MC_LOCAL(uint32_t, u32Value);
11439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11442 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11443 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
11444 IEM_MC_ADVANCE_RIP();
11445 IEM_MC_END();
11446 break;
11447
11448 case IEMMODE_64BIT:
11449 IEM_MC_BEGIN(0, 2);
11450 IEM_MC_LOCAL(uint64_t, u64Value);
11451 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11452 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11454 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11455 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
11456 IEM_MC_ADVANCE_RIP();
11457 IEM_MC_END();
11458 break;
11459 }
11460 }
11461 return VINF_SUCCESS;
11462}
11463
11464
11465/** Opcode 0x63. */
11466FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
11467{
11468 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
11469 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
11470 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11471 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
11472 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
11473}
11474
11475
11476/** Opcode 0x8c. */
11477FNIEMOP_DEF(iemOp_mov_Ev_Sw)
11478{
11479 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
11480
11481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11482
11483 /*
11484 * Check that the destination register exists. The REX.R prefix is ignored.
11485 */
11486 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11487 if ( iSegReg > X86_SREG_GS)
11488 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11489
11490 /*
11491 * If rm is denoting a register, no more instruction bytes.
11492 * In that case, the operand size is respected and the upper bits are
11505 11493 * cleared (starting with some Pentium models).
11494 */
11495 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11496 {
11497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11498 switch (pVCpu->iem.s.enmEffOpSize)
11499 {
11500 case IEMMODE_16BIT:
11501 IEM_MC_BEGIN(0, 1);
11502 IEM_MC_LOCAL(uint16_t, u16Value);
11503 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11504 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
11505 IEM_MC_ADVANCE_RIP();
11506 IEM_MC_END();
11507 break;
11508
11509 case IEMMODE_32BIT:
11510 IEM_MC_BEGIN(0, 1);
11511 IEM_MC_LOCAL(uint32_t, u32Value);
11512 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
11513 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
11514 IEM_MC_ADVANCE_RIP();
11515 IEM_MC_END();
11516 break;
11517
11518 case IEMMODE_64BIT:
11519 IEM_MC_BEGIN(0, 1);
11520 IEM_MC_LOCAL(uint64_t, u64Value);
11521 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
11522 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
11523 IEM_MC_ADVANCE_RIP();
11524 IEM_MC_END();
11525 break;
11526 }
11527 }
11528 else
11529 {
11530 /*
11531 * We're saving the register to memory. The access is word sized
11532 * regardless of operand size prefixes.
11533 */
11534#if 0 /* not necessary */
11535 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11536#endif
11537 IEM_MC_BEGIN(0, 2);
11538 IEM_MC_LOCAL(uint16_t, u16Value);
11539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11542 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
11543 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
11544 IEM_MC_ADVANCE_RIP();
11545 IEM_MC_END();
11546 }
11547 return VINF_SUCCESS;
11548}
11549
11550
11551
11552
11553/** Opcode 0x8d. */
11554FNIEMOP_DEF(iemOp_lea_Gv_M)
11555{
11556 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
11557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11558 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11559 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
11560
11561 switch (pVCpu->iem.s.enmEffOpSize)
11562 {
11563 case IEMMODE_16BIT:
11564 IEM_MC_BEGIN(0, 2);
11565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11566 IEM_MC_LOCAL(uint16_t, u16Cast);
11567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11569 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
11570 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
11571 IEM_MC_ADVANCE_RIP();
11572 IEM_MC_END();
11573 return VINF_SUCCESS;
11574
11575 case IEMMODE_32BIT:
11576 IEM_MC_BEGIN(0, 2);
11577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11578 IEM_MC_LOCAL(uint32_t, u32Cast);
11579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11581 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
11582 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
11583 IEM_MC_ADVANCE_RIP();
11584 IEM_MC_END();
11585 return VINF_SUCCESS;
11586
11587 case IEMMODE_64BIT:
11588 IEM_MC_BEGIN(0, 1);
11589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11592 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
11593 IEM_MC_ADVANCE_RIP();
11594 IEM_MC_END();
11595 return VINF_SUCCESS;
11596 }
11597 AssertFailedReturn(VERR_IEM_IPE_7);
11598}
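/* Note! LEA only stores the result of the effective address calculation and
   never touches memory, so no segment or paging checks apply here; the
   register form rejected above is the sole invalid encoding. In 16/32-bit
   operand modes the address is truncated to the operand size via
   IEM_MC_ASSIGN_TO_SMALLER. */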
11599
11600
11601/** Opcode 0x8e. */
11602FNIEMOP_DEF(iemOp_mov_Sw_Ev)
11603{
11604 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
11605
11606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11607
11608 /*
11609 * The practical operand size is 16-bit.
11610 */
11611#if 0 /* not necessary */
11612 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
11613#endif
11614
11615 /*
11616 * Check that the destination register exists and can be used with this
11617 * instruction. The REX.R prefix is ignored.
11618 */
11619 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
11620 if ( iSegReg == X86_SREG_CS
11621 || iSegReg > X86_SREG_GS)
11622 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11623
11624 /*
11625 * If rm is denoting a register, no more instruction bytes.
11626 */
11627 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11628 {
11629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11630 IEM_MC_BEGIN(2, 0);
11631 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11632 IEM_MC_ARG(uint16_t, u16Value, 1);
11633 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11634 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11635 IEM_MC_END();
11636 }
11637 else
11638 {
11639 /*
11640 * We're loading the register from memory. The access is word sized
11641 * regardless of operand size prefixes.
11642 */
11643 IEM_MC_BEGIN(2, 1);
11644 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
11645 IEM_MC_ARG(uint16_t, u16Value, 1);
11646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11649 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11650 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
11651 IEM_MC_END();
11652 }
11653 return VINF_SUCCESS;
11654}
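/* Note! CS cannot be the destination of MOV (#UD above) since that would
   change CS without supplying a new RIP. Loading SS additionally inhibits
   interrupts for one instruction; that side effect is presumably dealt with
   inside iemCImpl_load_SReg. */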
11655
11656
11657/** Opcode 0x8f /0. */
11658FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
11659{
11660 /* This bugger is rather annoying as it requires rSP to be updated before
11661 doing the effective address calculations. Will eventually require a
11662 split between the R/M+SIB decoding and the effective address
11663 calculation - which is something that is required for any attempt at
11664 reusing this code for a recompiler. It may also be good to have if we
11665 need to delay #UD exception caused by invalid lock prefixes.
11666
11667 For now, we'll do a mostly safe interpreter-only implementation here. */
11680 11668 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
11681 11669 * now until tests show it's checked. */
11670 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
11671
11672 /* Register access is relatively easy and can share code. */
11673 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11674 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11675
11676 /*
11677 * Memory target.
11678 *
11679 * Intel says that RSP is incremented before it's used in any effective
11692 11680 * address calculations. This means some serious extra annoyance here since
11681 * we decode and calculate the effective address in one step and like to
11682 * delay committing registers till everything is done.
11683 *
11684 * So, we'll decode and calculate the effective address twice. This will
11685 * require some recoding if turned into a recompiler.
11686 */
11687 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
11688
11689#ifndef TST_IEM_CHECK_MC
11690 /* Calc effective address with modified ESP. */
11691/** @todo testcase */
11692 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
11693 RTGCPTR GCPtrEff;
11694 VBOXSTRICTRC rcStrict;
11695 switch (pVCpu->iem.s.enmEffOpSize)
11696 {
11697 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
11698 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
11699 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
11700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11701 }
11702 if (rcStrict != VINF_SUCCESS)
11703 return rcStrict;
11704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11705
11706 /* Perform the operation - this should be CImpl. */
11707 RTUINT64U TmpRsp;
11708 TmpRsp.u = pCtx->rsp;
11709 switch (pVCpu->iem.s.enmEffOpSize)
11710 {
11711 case IEMMODE_16BIT:
11712 {
11713 uint16_t u16Value;
11714 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
11715 if (rcStrict == VINF_SUCCESS)
11716 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
11717 break;
11718 }
11719
11720 case IEMMODE_32BIT:
11721 {
11722 uint32_t u32Value;
11723 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
11724 if (rcStrict == VINF_SUCCESS)
11725 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
11726 break;
11727 }
11728
11729 case IEMMODE_64BIT:
11730 {
11731 uint64_t u64Value;
11732 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
11733 if (rcStrict == VINF_SUCCESS)
11734 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
11735 break;
11736 }
11737
11738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11739 }
11740 if (rcStrict == VINF_SUCCESS)
11741 {
11742 pCtx->rsp = TmpRsp.u;
11743 iemRegUpdateRipAndClearRF(pVCpu);
11744 }
11745 return rcStrict;
11746
11747#else
11748 return VERR_IEM_IPE_2;
11749#endif
11750}
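/* Note! The ordering matters for things like 'pop word [esp+2]': the ESP
   value used in the effective address is the one *after* the pop has
   adjusted it, which is why the worker above calculates the address with an
   SP adjusted by the operand size and pops via a temporary RSP copy, only
   committing it once the store has succeeded. */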
11751
11752
11753/** Opcode 0x8f. */
11754FNIEMOP_DEF(iemOp_Grp1A)
11755{
11756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11757 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11758 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11759
11760 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11761 /** @todo XOP decoding. */
11762 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11763 return IEMOP_RAISE_INVALID_OPCODE();
11764}
11765
11766
11767/**
11768 * Common 'xchg reg,rAX' helper.
11769 */
11770FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11771{
11772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11773
11774 iReg |= pVCpu->iem.s.uRexB;
11775 switch (pVCpu->iem.s.enmEffOpSize)
11776 {
11777 case IEMMODE_16BIT:
11778 IEM_MC_BEGIN(0, 2);
11779 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11780 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11781 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11782 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11783 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11784 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11785 IEM_MC_ADVANCE_RIP();
11786 IEM_MC_END();
11787 return VINF_SUCCESS;
11788
11789 case IEMMODE_32BIT:
11790 IEM_MC_BEGIN(0, 2);
11791 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11792 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11793 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11794 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11795 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11796 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11797 IEM_MC_ADVANCE_RIP();
11798 IEM_MC_END();
11799 return VINF_SUCCESS;
11800
11801 case IEMMODE_64BIT:
11802 IEM_MC_BEGIN(0, 2);
11803 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11804 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11805 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11806 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11807 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11808 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11809 IEM_MC_ADVANCE_RIP();
11810 IEM_MC_END();
11811 return VINF_SUCCESS;
11812
11813 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11814 }
11815}
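/* Note! Opcodes 0x90..0x97 all funnel into this helper, and the
   'iReg |= pVCpu->iem.s.uRexB' above is what turns 0x90 with REX.B from NOP
   into a real 'xchg r8,rAX' - which is why iemOp_nop below must check for
   that prefix before treating the byte as NOP/PAUSE. */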
11816
11817
11818/** Opcode 0x90. */
11819FNIEMOP_DEF(iemOp_nop)
11820{
11821 /* R8/R8D and RAX/EAX can be exchanged. */
11822 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11823 {
11824 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11825 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11826 }
11827
11828 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
11829 IEMOP_MNEMONIC(pause, "pause");
11830 else
11831 IEMOP_MNEMONIC(nop, "nop");
11832 IEM_MC_BEGIN(0, 0);
11833 IEM_MC_ADVANCE_RIP();
11834 IEM_MC_END();
11835 return VINF_SUCCESS;
11836}
11837
11838
11839/** Opcode 0x91. */
11840FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11841{
11842 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11843 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11844}
11845
11846
11847/** Opcode 0x92. */
11848FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11849{
11850 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11851 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11852}
11853
11854
11855/** Opcode 0x93. */
11856FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11857{
11858 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
11859 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11860}
11861
11862
11863/** Opcode 0x94. */
11864FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11865{
11878 11866 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
11867 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11868}


/** Opcode 0x95. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}


/** Opcode 0x96. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}


/** Opcode 0x97. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}


/** Opcode 0x98. */
FNIEMOP_DEF(iemOp_cbw)
{
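    /* Sign extension is done branch-wise on the accumulator's top bit rather
       than by widening arithmetic: e.g. AL=0x9C has bit 7 set, so the high
       byte is OR'ed in and AX becomes 0xFF9C, while AL=0x5A would instead be
       masked down to AX=0x005A. */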
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x99. */
FNIEMOP_DEF(iemOp_cwd)
{
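    /* Unlike CBW, this only derives the sign word: the accumulator is left
       untouched and DX/EDX/RDX is set to all ones or all zeroes depending on
       the top bit, e.g. AX=0x8000 yields DX=0xFFFF, i.e. DX:AX=0xFFFF8000. */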
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0x9a. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x9c. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x9d. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}


/** Opcode 0x9e. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
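    /* In effect: EFLAGS = (EFLAGS & ~0xff) | (AH & (SF|ZF|AF|PF|CF)) | RA1,
       where RA1 is the reserved always-one bit 1 -- e.g. AH=0xD7 commits
       SF, ZF, AF, PF and CF all set. */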
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x9f. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the instruction and fend off
 * lock prefixes. Will return on failure.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
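
/* Illustration: a moffs operand is a bare absolute offset with no ModR/M
   byte, its width given by the effective address size. In 32-bit code the
   bytes a1 44 33 22 11 decode as mov eax,[0x11223344], while in 64-bit code
   opcode 0xa1 carries a full 8-byte offset. */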

/** Opcode 0xa0. */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xa1. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}


/** Opcode 0xa2. */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xa3. */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
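
/* A rough feel for the expansion: IEM_MOVS_CASE(16, 32) loads a word from
   DS:ESI (or the overridden segment), stores it to ES:EDI, and then either
   adds or subtracts 2 on both index registers depending on EFLAGS.DF. */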

/** Opcode 0xa4. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xa5. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_MOVS_CASE

/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
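
/* Note that CMPS never writes memory: uValue1 is a local copy that only the
   flag computation sees, so both operands are left intact and just EFLAGS is
   updated by the cmp worker. */
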
/** Opcode 0xa6. */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xa7. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_CMPS_CASE

/** Opcode 0xa8. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}


/** Opcode 0xa9. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}


/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
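
/* STOS always stores to ES:rDI -- segment prefixes do not apply to the
   destination -- and only rDI steps, the source being the accumulator. */
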
/** Opcode 0xaa. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xab. */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_STOS_CASE

/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();

/** Opcode 0xac. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xad. */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_LODS_CASE

/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
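
/* SCAS is CMPS with the accumulator as the first operand: it compares
   AL/AX/EAX/RAX against ES:[rDI] via the same cmp worker (memory untouched,
   EFLAGS updated) and advances rDI only. */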

/** Opcode 0xae. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xaf. */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? 16-bit addressing cannot be encoded in 64-bit mode, but 32-bit can, right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}

#undef IEM_SCAS_CASE

/**
 * Common 'mov r8, imm8' helper.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}


/** Opcode 0xb0. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/** Opcode 0xb1. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/** Opcode 0xb2. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/** Opcode 0xb3. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/** Opcode 0xb4. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0xb5. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0xb6. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0xb7. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}


/**
 * Common 'mov regX,immX' helper.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iReg, u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
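            /* B8+rd with REX.W is the one instruction form carrying a full
               8-byte immediate (assemblers spell it mov r64,imm64 or movabs),
               hence the dedicated U64 opcode fetch here. */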
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
        }
    }

    return VINF_SUCCESS;
}


/** Opcode 0xb8. */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/** Opcode 0xb9. */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/** Opcode 0xba. */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/** Opcode 0xbb. */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/** Opcode 0xbc. */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/** Opcode 0xbd. */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/** Opcode 0xbe. */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/** Opcode 0xbf. */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}


/** Opcode 0xc0. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
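    /* Decode example: for c0 e8 04 the ModRM byte 0xe8 gives mod=3, reg=5
       (shr) and rm=0, i.e. shr al,4 via the register path below. */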
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xc1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/** Opcode 0xc2. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}


/** Opcode 0xc3. */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}


/** Opcode 0xc4. */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
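        /* For reference, the 2-byte VEX prefix layout is:
               byte 0: 0xc5
               byte 1: bit 7 = ~R, bits 6:3 = ~vvvv, bit 2 = L, bits 1:0 = pp
           so the two bits LES would read as MOD are ~R and the top ~vvvv bit. */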
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}


/** Opcode 0xc5. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
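    /* For reference, the 3-byte VEX prefix layout is:
           byte 0: 0xc4
           byte 1: bit 7 = ~R, bit 6 = ~X, bit 5 = ~B, bits 4:0 = mmmmm (opcode map)
           byte 2: bit 7 = W, bits 6:3 = ~vvvv, bit 2 = L, bits 1:0 = pp */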
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE();
    }

    IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0xc6. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}


/** Opcode 0xc7. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13571 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
13572 return IEMOP_RAISE_INVALID_OPCODE();
13573 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
13574
13575 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13576 {
13577 /* register access */
13578 switch (pVCpu->iem.s.enmEffOpSize)
13579 {
13580 case IEMMODE_16BIT:
13581 IEM_MC_BEGIN(0, 0);
13582 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13584 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
13585 IEM_MC_ADVANCE_RIP();
13586 IEM_MC_END();
13587 return VINF_SUCCESS;
13588
13589 case IEMMODE_32BIT:
13590 IEM_MC_BEGIN(0, 0);
13591 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13593 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
13594 IEM_MC_ADVANCE_RIP();
13595 IEM_MC_END();
13596 return VINF_SUCCESS;
13597
13598 case IEMMODE_64BIT:
13599 IEM_MC_BEGIN(0, 0);
13600 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13602 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
13603 IEM_MC_ADVANCE_RIP();
13604 IEM_MC_END();
13605 return VINF_SUCCESS;
13606
13607 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13608 }
13609 }
13610 else
13611 {
13612 /* memory access. */
13613 switch (pVCpu->iem.s.enmEffOpSize)
13614 {
13615 case IEMMODE_16BIT:
13616 IEM_MC_BEGIN(0, 1);
13617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
13619 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13621 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
13622 IEM_MC_ADVANCE_RIP();
13623 IEM_MC_END();
13624 return VINF_SUCCESS;
13625
13626 case IEMMODE_32BIT:
13627 IEM_MC_BEGIN(0, 1);
13628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13630 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13632 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
13633 IEM_MC_ADVANCE_RIP();
13634 IEM_MC_END();
13635 return VINF_SUCCESS;
13636
13637 case IEMMODE_64BIT:
13638 IEM_MC_BEGIN(0, 1);
13639 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13640 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
13641 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13643 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
13644 IEM_MC_ADVANCE_RIP();
13645 IEM_MC_END();
13646 return VINF_SUCCESS;
13647
13648 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13649 }
13650 }
13651}
13652
13653
13654
13655
13656/** Opcode 0xc8. */
13657FNIEMOP_DEF(iemOp_enter_Iw_Ib)
13658{
13659 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
13660 IEMOP_HLP_MIN_186();
13661 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13662 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
13663 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
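 /* Note: the CPU only honours the low five bits of the nesting level
    (level mod 32); iemCImpl_enter is assumed to apply that mask. */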
13664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13665 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
13666}
13667
13668
13669/** Opcode 0xc9. */
13670FNIEMOP_DEF(iemOp_leave)
13671{
13672 IEMOP_MNEMONIC(leave, "leave");
13673 IEMOP_HLP_MIN_186();
13674 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13676 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
13677}
13678
13679
13680/** Opcode 0xca. */
13681FNIEMOP_DEF(iemOp_retf_Iw)
13682{
13683 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
13684 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13686 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13687 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
13688}
13689
13690
13691/** Opcode 0xcb. */
13692FNIEMOP_DEF(iemOp_retf)
13693{
13694 IEMOP_MNEMONIC(retf, "retf");
13695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13696 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13697 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
13698}
13699
13700
13701/** Opcode 0xcc. */
13702FNIEMOP_DEF(iemOp_int_3)
13703{
13704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13705 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13706}
13707
13708
13709/** Opcode 0xcd. */
13710FNIEMOP_DEF(iemOp_int_Ib)
13711{
13712 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13714 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13715}
13716
13717
13718/** Opcode 0xce. */
13719FNIEMOP_DEF(iemOp_into)
13720{
13721 IEMOP_MNEMONIC(into, "into");
13722 IEMOP_HLP_NO_64BIT();
13723
13724 IEM_MC_BEGIN(2, 0);
13725 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13726 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13727 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13728 IEM_MC_END();
13729 return VINF_SUCCESS;
13730}
13731
13732
13733/** Opcode 0xcf. */
13734FNIEMOP_DEF(iemOp_iret)
13735{
13736 IEMOP_MNEMONIC(iret, "iret");
13737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13738 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13739}
13740
13741
13742/** Opcode 0xd0. */
13743FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13744{
13745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13746 PCIEMOPSHIFTSIZES pImpl;
13747 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13748 {
13749 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13750 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13751 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13752 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13753 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13754 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13755 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13756 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13757 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch above is exhaustive. */
13758 }
13759 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13760
13761 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13762 {
13763 /* register */
13764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13765 IEM_MC_BEGIN(3, 0);
13766 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13767 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13768 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13769 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13770 IEM_MC_REF_EFLAGS(pEFlags);
13771 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13772 IEM_MC_ADVANCE_RIP();
13773 IEM_MC_END();
13774 }
13775 else
13776 {
13777 /* memory */
13778 IEM_MC_BEGIN(3, 2);
13779 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13780 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13781 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13783
13784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13786 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13787 IEM_MC_FETCH_EFLAGS(EFlags);
13788 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13789
13790 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13791 IEM_MC_COMMIT_EFLAGS(EFlags);
13792 IEM_MC_ADVANCE_RIP();
13793 IEM_MC_END();
13794 }
13795 return VINF_SUCCESS;
13796}
13797
13798
13799
13800/** Opcode 0xd1. */
13801FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13802{
13803 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13804 PCIEMOPSHIFTSIZES pImpl;
13805 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13806 {
13807 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13808 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13809 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13810 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13811 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13812 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13813 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13814 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13815 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch above is exhaustive. */
13816 }
13817 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13818
13819 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13820 {
13821 /* register */
13822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13823 switch (pVCpu->iem.s.enmEffOpSize)
13824 {
13825 case IEMMODE_16BIT:
13826 IEM_MC_BEGIN(3, 0);
13827 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13828 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13829 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13830 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13831 IEM_MC_REF_EFLAGS(pEFlags);
13832 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13833 IEM_MC_ADVANCE_RIP();
13834 IEM_MC_END();
13835 return VINF_SUCCESS;
13836
13837 case IEMMODE_32BIT:
13838 IEM_MC_BEGIN(3, 0);
13839 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13840 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13841 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13842 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13843 IEM_MC_REF_EFLAGS(pEFlags);
13844 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13845 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13846 IEM_MC_ADVANCE_RIP();
13847 IEM_MC_END();
13848 return VINF_SUCCESS;
13849
13850 case IEMMODE_64BIT:
13851 IEM_MC_BEGIN(3, 0);
13852 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13853 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13854 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13855 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13856 IEM_MC_REF_EFLAGS(pEFlags);
13857 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13858 IEM_MC_ADVANCE_RIP();
13859 IEM_MC_END();
13860 return VINF_SUCCESS;
13861
13862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13863 }
13864 }
13865 else
13866 {
13867 /* memory */
13868 switch (pVCpu->iem.s.enmEffOpSize)
13869 {
13870 case IEMMODE_16BIT:
13871 IEM_MC_BEGIN(3, 2);
13872 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13873 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13874 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13876
13877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13879 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13880 IEM_MC_FETCH_EFLAGS(EFlags);
13881 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13882
13883 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13884 IEM_MC_COMMIT_EFLAGS(EFlags);
13885 IEM_MC_ADVANCE_RIP();
13886 IEM_MC_END();
13887 return VINF_SUCCESS;
13888
13889 case IEMMODE_32BIT:
13890 IEM_MC_BEGIN(3, 2);
13891 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13892 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13893 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13895
13896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13898 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13899 IEM_MC_FETCH_EFLAGS(EFlags);
13900 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13901
13902 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13903 IEM_MC_COMMIT_EFLAGS(EFlags);
13904 IEM_MC_ADVANCE_RIP();
13905 IEM_MC_END();
13906 return VINF_SUCCESS;
13907
13908 case IEMMODE_64BIT:
13909 IEM_MC_BEGIN(3, 2);
13910 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13911 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13912 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13914
13915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13917 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13918 IEM_MC_FETCH_EFLAGS(EFlags);
13919 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13920
13921 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13922 IEM_MC_COMMIT_EFLAGS(EFlags);
13923 IEM_MC_ADVANCE_RIP();
13924 IEM_MC_END();
13925 return VINF_SUCCESS;
13926
13927 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13928 }
13929 }
13930}
13931
13932
13933/** Opcode 0xd2. */
13934FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13935{
13936 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13937 PCIEMOPSHIFTSIZES pImpl;
13938 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13939 {
13940 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13941 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13942 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13943 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13944 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13945 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13946 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13947 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13948 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch above is exhaustive. */
13949 }
13950 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13951
13952 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13953 {
13954 /* register */
13955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13956 IEM_MC_BEGIN(3, 0);
13957 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13958 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13959 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13960 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13961 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13962 IEM_MC_REF_EFLAGS(pEFlags);
13963 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13964 IEM_MC_ADVANCE_RIP();
13965 IEM_MC_END();
13966 }
13967 else
13968 {
13969 /* memory */
13970 IEM_MC_BEGIN(3, 2);
13971 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13972 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13973 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13974 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13975
13976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13978 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13979 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13980 IEM_MC_FETCH_EFLAGS(EFlags);
13981 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13982
13983 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13984 IEM_MC_COMMIT_EFLAGS(EFlags);
13985 IEM_MC_ADVANCE_RIP();
13986 IEM_MC_END();
13987 }
13988 return VINF_SUCCESS;
13989}
13990
13991
13992/** Opcode 0xd3. */
13993FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
13994{
13995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13996 PCIEMOPSHIFTSIZES pImpl;
13997 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13998 {
13999 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
14000 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
14001 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
14002 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
14003 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
14004 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
14005 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
14006 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14007 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc cannot tell the switch above is exhaustive. */
14008 }
14009 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
14010
14011 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14012 {
14013 /* register */
14014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14015 switch (pVCpu->iem.s.enmEffOpSize)
14016 {
14017 case IEMMODE_16BIT:
14018 IEM_MC_BEGIN(3, 0);
14019 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14020 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14021 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14022 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14023 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14024 IEM_MC_REF_EFLAGS(pEFlags);
14025 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14026 IEM_MC_ADVANCE_RIP();
14027 IEM_MC_END();
14028 return VINF_SUCCESS;
14029
14030 case IEMMODE_32BIT:
14031 IEM_MC_BEGIN(3, 0);
14032 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14033 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14034 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14035 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14036 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14037 IEM_MC_REF_EFLAGS(pEFlags);
14038 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14039 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
14040 IEM_MC_ADVANCE_RIP();
14041 IEM_MC_END();
14042 return VINF_SUCCESS;
14043
14044 case IEMMODE_64BIT:
14045 IEM_MC_BEGIN(3, 0);
14046 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14047 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14048 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14049 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
14050 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14051 IEM_MC_REF_EFLAGS(pEFlags);
14052 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14053 IEM_MC_ADVANCE_RIP();
14054 IEM_MC_END();
14055 return VINF_SUCCESS;
14056
14057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14058 }
14059 }
14060 else
14061 {
14062 /* memory */
14063 switch (pVCpu->iem.s.enmEffOpSize)
14064 {
14065 case IEMMODE_16BIT:
14066 IEM_MC_BEGIN(3, 2);
14067 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14068 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14069 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14071
14072 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14074 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14075 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14076 IEM_MC_FETCH_EFLAGS(EFlags);
14077 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
14078
14079 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
14080 IEM_MC_COMMIT_EFLAGS(EFlags);
14081 IEM_MC_ADVANCE_RIP();
14082 IEM_MC_END();
14083 return VINF_SUCCESS;
14084
14085 case IEMMODE_32BIT:
14086 IEM_MC_BEGIN(3, 2);
14087 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14088 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14089 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14091
14092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14094 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14095 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14096 IEM_MC_FETCH_EFLAGS(EFlags);
14097 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
14098
14099 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
14100 IEM_MC_COMMIT_EFLAGS(EFlags);
14101 IEM_MC_ADVANCE_RIP();
14102 IEM_MC_END();
14103 return VINF_SUCCESS;
14104
14105 case IEMMODE_64BIT:
14106 IEM_MC_BEGIN(3, 2);
14107 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14108 IEM_MC_ARG(uint8_t, cShiftArg, 1);
14109 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
14110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14111
14112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14114 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
14115 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
14116 IEM_MC_FETCH_EFLAGS(EFlags);
14117 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
14118
14119 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
14120 IEM_MC_COMMIT_EFLAGS(EFlags);
14121 IEM_MC_ADVANCE_RIP();
14122 IEM_MC_END();
14123 return VINF_SUCCESS;
14124
14125 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14126 }
14127 }
14128}
14129
14130/** Opcode 0xd4. */
14131FNIEMOP_DEF(iemOp_aam_Ib)
14132{
14133 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
14134 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14136 IEMOP_HLP_NO_64BIT();
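 /* AAM divides AL by the immediate operand, so a zero immediate raises #DE
    before anything else is done. */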
14137 if (!bImm)
14138 return IEMOP_RAISE_DIVIDE_ERROR();
14139 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
14140}
14141
14142
14143/** Opcode 0xd5. */
14144FNIEMOP_DEF(iemOp_aad_Ib)
14145{
14146 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
14147 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14149 IEMOP_HLP_NO_64BIT();
14150 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
14151}
14152
14153
14154/** Opcode 0xd6. */
14155FNIEMOP_DEF(iemOp_salc)
14156{
14157 IEMOP_MNEMONIC(salc, "salc");
14158 IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
14160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14161 IEMOP_HLP_NO_64BIT();
14162
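 /* SALC ("set AL from carry") sets AL to 0xff if EFLAGS.CF is set and to
    0x00 otherwise; no other registers or flags are modified. */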
14163 IEM_MC_BEGIN(0, 0);
14164 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14165 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14166 } IEM_MC_ELSE() {
14167 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14168 } IEM_MC_ENDIF();
14169 IEM_MC_ADVANCE_RIP();
14170 IEM_MC_END();
14171 return VINF_SUCCESS;
14172}
14173
14174
14175/** Opcode 0xd7. */
14176FNIEMOP_DEF(iemOp_xlat)
14177{
14178 IEMOP_MNEMONIC(xlat, "xlat");
14179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
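 /* XLAT replaces AL with the byte at [seg:(e/r)BX + zero-extended AL]; the
    effective address size selects which BX width is used below. */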
14180 switch (pVCpu->iem.s.enmEffAddrMode)
14181 {
14182 case IEMMODE_16BIT:
14183 IEM_MC_BEGIN(2, 0);
14184 IEM_MC_LOCAL(uint8_t, u8Tmp);
14185 IEM_MC_LOCAL(uint16_t, u16Addr);
14186 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
14187 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
14188 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
14189 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14190 IEM_MC_ADVANCE_RIP();
14191 IEM_MC_END();
14192 return VINF_SUCCESS;
14193
14194 case IEMMODE_32BIT:
14195 IEM_MC_BEGIN(2, 0);
14196 IEM_MC_LOCAL(uint8_t, u8Tmp);
14197 IEM_MC_LOCAL(uint32_t, u32Addr);
14198 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
14199 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
14200 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
14201 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14202 IEM_MC_ADVANCE_RIP();
14203 IEM_MC_END();
14204 return VINF_SUCCESS;
14205
14206 case IEMMODE_64BIT:
14207 IEM_MC_BEGIN(2, 0);
14208 IEM_MC_LOCAL(uint8_t, u8Tmp);
14209 IEM_MC_LOCAL(uint64_t, u64Addr);
14210 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
14211 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
14212 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
14213 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14214 IEM_MC_ADVANCE_RIP();
14215 IEM_MC_END();
14216 return VINF_SUCCESS;
14217
14218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14219 }
14220}
14221
14222
14223/**
14224 * Common worker for FPU instructions working on ST0 and STn, and storing the
14225 * result in ST0.
14226 *
14227 * @param bRm The ModR/M byte; the RM field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14228 */
14229FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14230{
14231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14232
14233 IEM_MC_BEGIN(3, 1);
14234 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14235 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14236 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14237 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14238
14239 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14240 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14241 IEM_MC_PREPARE_FPU_USAGE();
14242 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14243 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14244 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14245 IEM_MC_ELSE()
14246 IEM_MC_FPU_STACK_UNDERFLOW(0);
14247 IEM_MC_ENDIF();
14248 IEM_MC_ADVANCE_RIP();
14249
14250 IEM_MC_END();
14251 return VINF_SUCCESS;
14252}
14253
14254
14255/**
14256 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14257 * flags.
14258 *
14259 * @param bRm The ModR/M byte; the RM field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14260 */
14261FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14262{
14263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14264
14265 IEM_MC_BEGIN(3, 1);
14266 IEM_MC_LOCAL(uint16_t, u16Fsw);
14267 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14268 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14269 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14270
14271 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14272 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14273 IEM_MC_PREPARE_FPU_USAGE();
14274 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14275 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14276 IEM_MC_UPDATE_FSW(u16Fsw);
14277 IEM_MC_ELSE()
14278 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14279 IEM_MC_ENDIF();
14280 IEM_MC_ADVANCE_RIP();
14281
14282 IEM_MC_END();
14283 return VINF_SUCCESS;
14284}
14285
14286
14287/**
14288 * Common worker for FPU instructions working on ST0 and STn, only affecting
14289 * flags, and popping when done.
14290 *
14291 * @param bRm The ModR/M byte; the RM field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14292 */
14293FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14294{
14295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14296
14297 IEM_MC_BEGIN(3, 1);
14298 IEM_MC_LOCAL(uint16_t, u16Fsw);
14299 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14300 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14301 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14302
14303 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14304 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14305 IEM_MC_PREPARE_FPU_USAGE();
14306 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14307 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14308 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14309 IEM_MC_ELSE()
14310 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14311 IEM_MC_ENDIF();
14312 IEM_MC_ADVANCE_RIP();
14313
14314 IEM_MC_END();
14315 return VINF_SUCCESS;
14316}
14317
14318
14319/** Opcode 0xd8 11/0. */
14320FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
14321{
14322 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
14323 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
14324}
14325
14326
14327/** Opcode 0xd8 11/1. */
14328FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
14329{
14330 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
14331 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
14332}
14333
14334
14335/** Opcode 0xd8 11/2. */
14336FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
14337{
14338 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
14339 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
14340}
14341
14342
14343/** Opcode 0xd8 11/3. */
14344FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
14345{
14346 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
14347 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
14348}
14349
14350
14351/** Opcode 0xd8 11/4. */
14352FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
14353{
14354 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
14355 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
14356}
14357
14358
14359/** Opcode 0xd8 11/5. */
14360FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
14361{
14362 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
14363 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
14364}
14365
14366
14367/** Opcode 0xd8 11/6. */
14368FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
14369{
14370 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
14371 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
14372}
14373
14374
14375/** Opcode 0xd8 11/7. */
14376FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
14377{
14378 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
14379 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
14380}
14381
14382
14383/**
14384 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14385 * the result in ST0.
14386 *
14387 * @param bRm The ModR/M byte; encodes the m32r memory operand.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14388 */
14389FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14390{
14391 IEM_MC_BEGIN(3, 3);
14392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14393 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14394 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14395 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14396 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14397 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14398
14399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14401
14402 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14403 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14404 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14405
14406 IEM_MC_PREPARE_FPU_USAGE();
14407 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14408 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14409 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14410 IEM_MC_ELSE()
14411 IEM_MC_FPU_STACK_UNDERFLOW(0);
14412 IEM_MC_ENDIF();
14413 IEM_MC_ADVANCE_RIP();
14414
14415 IEM_MC_END();
14416 return VINF_SUCCESS;
14417}
14418
14419
14420/** Opcode 0xd8 !11/0. */
14421FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
14422{
14423 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
14424 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
14425}
14426
14427
14428/** Opcode 0xd8 !11/1. */
14429FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
14430{
14431 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
14432 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
14433}
14434
14435
14436/** Opcode 0xd8 !11/2. */
14437FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
14438{
14439 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
14440
14441 IEM_MC_BEGIN(3, 3);
14442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14443 IEM_MC_LOCAL(uint16_t, u16Fsw);
14444 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14445 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14446 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14447 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14448
14449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14451
14452 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14453 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14454 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14455
14456 IEM_MC_PREPARE_FPU_USAGE();
14457 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14458 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14459 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14460 IEM_MC_ELSE()
14461 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14462 IEM_MC_ENDIF();
14463 IEM_MC_ADVANCE_RIP();
14464
14465 IEM_MC_END();
14466 return VINF_SUCCESS;
14467}
14468
14469
14470/** Opcode 0xd8 !11/3. */
14471FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
14472{
14473 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
14474
14475 IEM_MC_BEGIN(3, 3);
14476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14477 IEM_MC_LOCAL(uint16_t, u16Fsw);
14478 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14479 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14480 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14481 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14482
14483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14485
14486 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14487 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14488 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14489
14490 IEM_MC_PREPARE_FPU_USAGE();
14491 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14492 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
14493 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14494 IEM_MC_ELSE()
14495 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14496 IEM_MC_ENDIF();
14497 IEM_MC_ADVANCE_RIP();
14498
14499 IEM_MC_END();
14500 return VINF_SUCCESS;
14501}
14502
14503
14504/** Opcode 0xd8 !11/4. */
14505FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
14506{
14507 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
14508 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
14509}
14510
14511
14512/** Opcode 0xd8 !11/5. */
14513FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
14514{
14515 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
14516 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
14517}
14518
14519
14520/** Opcode 0xd8 !11/6. */
14521FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
14522{
14523 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
14524 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
14525}
14526
14527
14528/** Opcode 0xd8 !11/7. */
14529FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
14530{
14531 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
14532 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
14533}
14534
14535
14536/** Opcode 0xd8. */
14537FNIEMOP_DEF(iemOp_EscF0)
14538{
14539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14540 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
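 /* Note: the value recorded above is the 11-bit FPU opcode (FOP) register:
    the low three bits of the escape byte (0xd8) end up in bits 10:8 and the
    ModR/M byte in bits 7:0; FNSTENV/FSAVE/FXSAVE store it for the last FP
    instruction. */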
14541
14542 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14543 {
14544 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14545 {
14546 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
14547 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
14548 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
14549 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
14550 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
14551 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
14552 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
14553 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
14554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14555 }
14556 }
14557 else
14558 {
14559 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14560 {
14561 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
14562 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
14563 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
14564 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
14565 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
14566 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
14567 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
14568 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
14569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14570 }
14571 }
14572}
14573
14574
14575/** Opcode 0xd9 /0 mem32real
14576 * @sa iemOp_fld_m64r */
14577FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
14578{
14579 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
14580
14581 IEM_MC_BEGIN(2, 3);
14582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14583 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14584 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
14585 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14586 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
14587
14588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14590
14591 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14592 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14593 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14594
14595 IEM_MC_PREPARE_FPU_USAGE();
14596 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14597 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
14598 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14599 IEM_MC_ELSE()
14600 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14601 IEM_MC_ENDIF();
14602 IEM_MC_ADVANCE_RIP();
14603
14604 IEM_MC_END();
14605 return VINF_SUCCESS;
14606}
14607
14608
14609/** Opcode 0xd9 !11/2 mem32real */
14610FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
14611{
14612 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
14613 IEM_MC_BEGIN(3, 2);
14614 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14615 IEM_MC_LOCAL(uint16_t, u16Fsw);
14616 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14617 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14618 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14619
14620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14622 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14623 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14624
14625 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14626 IEM_MC_PREPARE_FPU_USAGE();
14627 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14628 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14629 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14630 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14631 IEM_MC_ELSE()
14632 IEM_MC_IF_FCW_IM()
14633 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14634 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14635 IEM_MC_ENDIF();
14636 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14637 IEM_MC_ENDIF();
14638 IEM_MC_ADVANCE_RIP();
14639
14640 IEM_MC_END();
14641 return VINF_SUCCESS;
14642}
14643
14644
14645/** Opcode 0xd9 !11/3 */
14646FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
14647{
14648 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
14649 IEM_MC_BEGIN(3, 2);
14650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14651 IEM_MC_LOCAL(uint16_t, u16Fsw);
14652 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14653 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
14654 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14655
14656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14658 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14659 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14660
14661 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
14662 IEM_MC_PREPARE_FPU_USAGE();
14663 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14664 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
14665 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14666 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14667 IEM_MC_ELSE()
14668 IEM_MC_IF_FCW_IM()
14669 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
14670 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
14671 IEM_MC_ENDIF();
14672 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14673 IEM_MC_ENDIF();
14674 IEM_MC_ADVANCE_RIP();
14675
14676 IEM_MC_END();
14677 return VINF_SUCCESS;
14678}
14679
14680
14681/** Opcode 0xd9 !11/4 */
14682FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
14683{
14684 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
14685 IEM_MC_BEGIN(3, 0);
14686 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14687 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14688 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
14689 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14691 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14692 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14693 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14694 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
14695 IEM_MC_END();
14696 return VINF_SUCCESS;
14697}
14698
14699
14700/** Opcode 0xd9 !11/5 */
14701FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14702{
14703 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14704 IEM_MC_BEGIN(1, 1);
14705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14706 IEM_MC_ARG(uint16_t, u16Fcw, 0);
14707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14709 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14710 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14711 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14712 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14713 IEM_MC_END();
14714 return VINF_SUCCESS;
14715}
14716
14717
14718/** Opcode 0xd9 !11/6 */
14719FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14720{
14721 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14722 IEM_MC_BEGIN(3, 0);
14723 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14724 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14725 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14728 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14730 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14731 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14732 IEM_MC_END();
14733 return VINF_SUCCESS;
14734}
14735
14736
14737/** Opcode 0xd9 !11/7 */
14738FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14739{
14740 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14741 IEM_MC_BEGIN(2, 0);
14742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14743 IEM_MC_LOCAL(uint16_t, u16Fcw);
14744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14746 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14747 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14748 IEM_MC_FETCH_FCW(u16Fcw);
14749 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14750 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14751 IEM_MC_END();
14752 return VINF_SUCCESS;
14753}
14754
14755
14756/** Opcode 0xd9 0xd0; possibly also 0xd9 0xd8-0xdf and other aliases. */
14757FNIEMOP_DEF(iemOp_fnop)
14758{
14759 IEMOP_MNEMONIC(fnop, "fnop");
14760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14761
14762 IEM_MC_BEGIN(0, 0);
14763 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14764 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14765 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14766 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could
14767 * be an Intel optimization; investigate. */
14768 IEM_MC_UPDATE_FPU_OPCODE_IP();
14769 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14770 IEM_MC_END();
14771 return VINF_SUCCESS;
14772}
14773
14774
14775/** Opcode 0xd9 11/0 stN */
14776FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14777{
14778 IEMOP_MNEMONIC(fld_stN, "fld stN");
14779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14780
14781 /** @todo Testcase: Check if this raises \#MF? Intel documentation does not
14782 * mention it; AMD's indicates that it does. */
14783 IEM_MC_BEGIN(0, 2);
14784 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14785 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14786 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14787 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14788
14789 IEM_MC_PREPARE_FPU_USAGE();
14790 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14791 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14792 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14793 IEM_MC_ELSE()
14794 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14795 IEM_MC_ENDIF();
14796
14797 IEM_MC_ADVANCE_RIP();
14798 IEM_MC_END();
14799
14800 return VINF_SUCCESS;
14801}
14802
14803
14804/** Opcode 0xd9 11/3 stN */
14805FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14806{
14807 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14809
14810 /** @todo Testcase: Check if this raises \#MF? Intel documentation does not
14811 * mention it; AMD's indicates that it does. */
14812 IEM_MC_BEGIN(1, 3);
14813 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14814 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14815 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14816 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14817 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14818 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14819
14820 IEM_MC_PREPARE_FPU_USAGE();
14821 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14822 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14823 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14824 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14825 IEM_MC_ELSE()
14826 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14827 IEM_MC_ENDIF();
14828
14829 IEM_MC_ADVANCE_RIP();
14830 IEM_MC_END();
14831
14832 return VINF_SUCCESS;
14833}
14834
14835
14836/** Opcode 0xd9 11/4, 0xdd 11/2. */
14837FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14838{
14839 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
14840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14841
14842 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence: storing ST0 back to itself and popping just frees the register and increments TOP. */
14843 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14844 if (!iDstReg)
14845 {
14846 IEM_MC_BEGIN(0, 1);
14847 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14848 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14849 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14850
14851 IEM_MC_PREPARE_FPU_USAGE();
14852 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14853 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14854 IEM_MC_ELSE()
14855 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14856 IEM_MC_ENDIF();
14857
14858 IEM_MC_ADVANCE_RIP();
14859 IEM_MC_END();
14860 }
14861 else
14862 {
14863 IEM_MC_BEGIN(0, 2);
14864 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14865 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14866 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14867 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14868
14869 IEM_MC_PREPARE_FPU_USAGE();
14870 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14871 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14872 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14873 IEM_MC_ELSE()
14874 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14875 IEM_MC_ENDIF();
14876
14877 IEM_MC_ADVANCE_RIP();
14878 IEM_MC_END();
14879 }
14880 return VINF_SUCCESS;
14881}
14882
14883
14884/**
14885 * Common worker for FPU instructions working on ST0, replacing it with the
14886 * result, i.e. unary operators.
14887 *
14888 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14889 */
14890FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14891{
14892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14893
14894 IEM_MC_BEGIN(2, 1);
14895 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14896 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14897 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14898
14899 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14900 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14901 IEM_MC_PREPARE_FPU_USAGE();
14902 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14903 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14904 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14905 IEM_MC_ELSE()
14906 IEM_MC_FPU_STACK_UNDERFLOW(0);
14907 IEM_MC_ENDIF();
14908 IEM_MC_ADVANCE_RIP();
14909
14910 IEM_MC_END();
14911 return VINF_SUCCESS;
14912}
14913
14914
14915/** Opcode 0xd9 0xe0. */
14916FNIEMOP_DEF(iemOp_fchs)
14917{
14918 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14919 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14920}
14921
14922
14923/** Opcode 0xd9 0xe1. */
14924FNIEMOP_DEF(iemOp_fabs)
14925{
14926 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14927 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14928}
14929
14930
14931/**
14932 * Common worker for FPU instructions working on ST0 and only returning FSW.
14933 *
14934 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14935 */
14936FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14937{
14938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14939
14940 IEM_MC_BEGIN(2, 1);
14941 IEM_MC_LOCAL(uint16_t, u16Fsw);
14942 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14943 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14944
14945 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14946 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14947 IEM_MC_PREPARE_FPU_USAGE();
14948 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14949 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14950 IEM_MC_UPDATE_FSW(u16Fsw);
14951 IEM_MC_ELSE()
14952 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14953 IEM_MC_ENDIF();
14954 IEM_MC_ADVANCE_RIP();
14955
14956 IEM_MC_END();
14957 return VINF_SUCCESS;
14958}
14959
14960
14961/** Opcode 0xd9 0xe4. */
14962FNIEMOP_DEF(iemOp_ftst)
14963{
14964 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
14965 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
14966}
14967
14968
14969/** Opcode 0xd9 0xe5. */
14970FNIEMOP_DEF(iemOp_fxam)
14971{
14972 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
14973 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
14974}
14975
14976
14977/**
14978 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14979 *
14980 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14981 */
14982FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14983{
14984 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14985
14986 IEM_MC_BEGIN(1, 1);
14987 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14988 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14989
14990 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14991 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14992 IEM_MC_PREPARE_FPU_USAGE();
14993 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14994 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14995 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14996 IEM_MC_ELSE()
14997 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14998 IEM_MC_ENDIF();
14999 IEM_MC_ADVANCE_RIP();
15000
15001 IEM_MC_END();
15002 return VINF_SUCCESS;
15003}
15004
15005
15006/** Opcode 0xd9 0xe8. */
15007FNIEMOP_DEF(iemOp_fld1)
15008{
15009 IEMOP_MNEMONIC(fld1, "fld1");
15010 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
15011}
15012
15013
15014/** Opcode 0xd9 0xe9. */
15015FNIEMOP_DEF(iemOp_fldl2t)
15016{
15017 IEMOP_MNEMONIC(fldl2t, "fldl2t");
15018 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
15019}
15020
15021
15022/** Opcode 0xd9 0xea. */
15023FNIEMOP_DEF(iemOp_fldl2e)
15024{
15025 IEMOP_MNEMONIC(fldl2e, "fldl2e");
15026 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
15027}
15028
15029/** Opcode 0xd9 0xeb. */
15030FNIEMOP_DEF(iemOp_fldpi)
15031{
15032 IEMOP_MNEMONIC(fldpi, "fldpi");
15033 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
15034}
15035
15036
15037/** Opcode 0xd9 0xec. */
15038FNIEMOP_DEF(iemOp_fldlg2)
15039{
15040 IEMOP_MNEMONIC(fldlg2, "fldlg2");
15041 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
15042}
15043
15044/** Opcode 0xd9 0xed. */
15045FNIEMOP_DEF(iemOp_fldln2)
15046{
15047 IEMOP_MNEMONIC(fldln2, "fldln2");
15048 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
15049}
15050
15051
15052/** Opcode 0xd9 0xee. */
15053FNIEMOP_DEF(iemOp_fldz)
15054{
15055 IEMOP_MNEMONIC(fldz, "fldz");
15056 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
15057}
15058
15059
15060/** Opcode 0xd9 0xf0. */
15061FNIEMOP_DEF(iemOp_f2xm1)
15062{
15063 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
15064 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
15065}
15066
15067
15068/**
15069 * Common worker for FPU instructions working on STn and ST0, storing the result
15070 * in STn, and popping the stack unless IE, DE or ZE was raised.
15071 *
15072 * @param bRm The ModR/M byte; the RM field selects the STn destination.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15073 */
15074FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15075{
15076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15077
15078 IEM_MC_BEGIN(3, 1);
15079 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15080 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15081 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15082 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15083
15084 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15085 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15086
15087 IEM_MC_PREPARE_FPU_USAGE();
15088 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15089 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15090 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15091 IEM_MC_ELSE()
15092 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15093 IEM_MC_ENDIF();
15094 IEM_MC_ADVANCE_RIP();
15095
15096 IEM_MC_END();
15097 return VINF_SUCCESS;
15098}
15099
15100
15101/** Opcode 0xd9 0xf1. */
15102FNIEMOP_DEF(iemOp_fyl2x)
15103{
15104 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
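 /* The literal 1 below stands in for the ModR/M byte, so the helper's
    (bRm & X86_MODRM_RM_MASK) resolves to ST1. */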
15105 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
15106}
15107
15108
15109/**
15110 * Common worker for FPU instructions working on ST0 and having two outputs, one
15111 * replacing ST0 and one pushed onto the stack.
15112 *
15113 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15114 */
15115FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15116{
15117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15118
15119 IEM_MC_BEGIN(2, 1);
15120 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15121 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15122 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15123
15124 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15125 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15126 IEM_MC_PREPARE_FPU_USAGE();
15127 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15128 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15129 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15130 IEM_MC_ELSE()
15131 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15132 IEM_MC_ENDIF();
15133 IEM_MC_ADVANCE_RIP();
15134
15135 IEM_MC_END();
15136 return VINF_SUCCESS;
15137}
15138
15139
15140/** Opcode 0xd9 0xf2. */
15141FNIEMOP_DEF(iemOp_fptan)
15142{
15143 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
15144 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
15145}
15146
15147
15148/** Opcode 0xd9 0xf3. */
15149FNIEMOP_DEF(iemOp_fpatan)
15150{
15151 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
15152 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
15153}
15154
15155
15156/** Opcode 0xd9 0xf4. */
15157FNIEMOP_DEF(iemOp_fxtract)
15158{
15159 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
15160 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
15161}
15162
15163
15164/** Opcode 0xd9 0xf5. */
15165FNIEMOP_DEF(iemOp_fprem1)
15166{
15167 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
15168 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
15169}
15170
15171
15172/** Opcode 0xd9 0xf6. */
15173FNIEMOP_DEF(iemOp_fdecstp)
15174{
15175 IEMOP_MNEMONIC(fdecstp, "fdecstp");
15176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15177 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15178 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15179 * FINCSTP and FDECSTP. */
15180
15181 IEM_MC_BEGIN(0, 0);
15182
15183 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15184 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15185
15186 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15187 IEM_MC_FPU_STACK_DEC_TOP();
15188 IEM_MC_UPDATE_FSW_CONST(0);
15189
15190 IEM_MC_ADVANCE_RIP();
15191 IEM_MC_END();
15192 return VINF_SUCCESS;
15193}
15194
15195
15196/** Opcode 0xd9 0xf7. */
15197FNIEMOP_DEF(iemOp_fincstp)
15198{
15199 IEMOP_MNEMONIC(fincstp, "fincstp");
15200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15201 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
15202 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
15203 * FINCSTP and FDECSTP. */
15204
15205 IEM_MC_BEGIN(0,0);
15206
15207 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15208 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15209
15210 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15211 IEM_MC_FPU_STACK_INC_TOP();
15212 IEM_MC_UPDATE_FSW_CONST(0);
15213
15214 IEM_MC_ADVANCE_RIP();
15215 IEM_MC_END();
15216 return VINF_SUCCESS;
15217}
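
/* Note that FINCSTP and FDECSTP only rotate FSW.TOP modulo 8; the tag word
   and the register contents are untouched, so neither is equivalent to an
   actual push or pop. */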
15218
15219
15220/** Opcode 0xd9 0xf8. */
15221FNIEMOP_DEF(iemOp_fprem)
15222{
15223 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
15224 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
15225}
15226
15227
15228/** Opcode 0xd9 0xf9. */
15229FNIEMOP_DEF(iemOp_fyl2xp1)
15230{
15231 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
15232 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
15233}
15234
15235
15236/** Opcode 0xd9 0xfa. */
15237FNIEMOP_DEF(iemOp_fsqrt)
15238{
15239 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
15240 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
15241}
15242
15243
15244/** Opcode 0xd9 0xfb. */
15245FNIEMOP_DEF(iemOp_fsincos)
15246{
15247 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
15248 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
15249}
15250
15251
15252/** Opcode 0xd9 0xfc. */
15253FNIEMOP_DEF(iemOp_frndint)
15254{
15255 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
15256 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
15257}
15258
15259
15260/** Opcode 0xd9 0xfd. */
15261FNIEMOP_DEF(iemOp_fscale)
15262{
15263 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
15264 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
15265}
15266
15267
15268/** Opcode 0xd9 0xfe. */
15269FNIEMOP_DEF(iemOp_fsin)
15270{
15271 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
15272 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
15273}
15274
15275
15276/** Opcode 0xd9 0xff. */
15277FNIEMOP_DEF(iemOp_fcos)
15278{
15279 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
15280 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
15281}
15282
15283
15284/** Used by iemOp_EscF1. */
15285IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
15286{
15287 /* 0xe0 */ iemOp_fchs,
15288 /* 0xe1 */ iemOp_fabs,
15289 /* 0xe2 */ iemOp_Invalid,
15290 /* 0xe3 */ iemOp_Invalid,
15291 /* 0xe4 */ iemOp_ftst,
15292 /* 0xe5 */ iemOp_fxam,
15293 /* 0xe6 */ iemOp_Invalid,
15294 /* 0xe7 */ iemOp_Invalid,
15295 /* 0xe8 */ iemOp_fld1,
15296 /* 0xe9 */ iemOp_fldl2t,
15297 /* 0xea */ iemOp_fldl2e,
15298 /* 0xeb */ iemOp_fldpi,
15299 /* 0xec */ iemOp_fldlg2,
15300 /* 0xed */ iemOp_fldln2,
15301 /* 0xee */ iemOp_fldz,
15302 /* 0xef */ iemOp_Invalid,
15303 /* 0xf0 */ iemOp_f2xm1,
15304 /* 0xf1 */ iemOp_fyl2x,
15305 /* 0xf2 */ iemOp_fptan,
15306 /* 0xf3 */ iemOp_fpatan,
15307 /* 0xf4 */ iemOp_fxtract,
15308 /* 0xf5 */ iemOp_fprem1,
15309 /* 0xf6 */ iemOp_fdecstp,
15310 /* 0xf7 */ iemOp_fincstp,
15311 /* 0xf8 */ iemOp_fprem,
15312 /* 0xf9 */ iemOp_fyl2xp1,
15313 /* 0xfa */ iemOp_fsqrt,
15314 /* 0xfb */ iemOp_fsincos,
15315 /* 0xfc */ iemOp_frndint,
15316 /* 0xfd */ iemOp_fscale,
15317 /* 0xfe */ iemOp_fsin,
15318 /* 0xff */ iemOp_fcos
15319};
15320
15321
15322/** Opcode 0xd9. */
15323FNIEMOP_DEF(iemOp_EscF1)
15324{
15325 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15326 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
15327
15328 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15329 {
15330 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15331 {
15332 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
15333 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
15334 case 2:
15335 if (bRm == 0xd0)
15336 return FNIEMOP_CALL(iemOp_fnop);
15337 return IEMOP_RAISE_INVALID_OPCODE();
15338 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
15339 case 4:
15340 case 5:
15341 case 6:
15342 case 7:
15343 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
15344 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
15345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15346 }
15347 }
15348 else
15349 {
15350 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15351 {
15352 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
15353 case 1: return IEMOP_RAISE_INVALID_OPCODE();
15354 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
15355 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
15356 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
15357 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
15358 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
15359 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
15360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15361 }
15362 }
15363}
15364
15365
15366/** Opcode 0xda 11/0. */
15367FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
15368{
15369 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
15370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15371
15372 IEM_MC_BEGIN(0, 1);
15373 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15374
15375 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15376 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15377
15378 IEM_MC_PREPARE_FPU_USAGE();
15379 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15380 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
15381 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15382 IEM_MC_ENDIF();
15383 IEM_MC_UPDATE_FPU_OPCODE_IP();
15384 IEM_MC_ELSE()
15385 IEM_MC_FPU_STACK_UNDERFLOW(0);
15386 IEM_MC_ENDIF();
15387 IEM_MC_ADVANCE_RIP();
15388
15389 IEM_MC_END();
15390 return VINF_SUCCESS;
15391}
15392
15393
15394/** Opcode 0xda 11/1. */
15395FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
15396{
15397 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
15398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15399
15400 IEM_MC_BEGIN(0, 1);
15401 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15402
15403 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15404 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15405
15406 IEM_MC_PREPARE_FPU_USAGE();
15407 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15408 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
15409 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15410 IEM_MC_ENDIF();
15411 IEM_MC_UPDATE_FPU_OPCODE_IP();
15412 IEM_MC_ELSE()
15413 IEM_MC_FPU_STACK_UNDERFLOW(0);
15414 IEM_MC_ENDIF();
15415 IEM_MC_ADVANCE_RIP();
15416
15417 IEM_MC_END();
15418 return VINF_SUCCESS;
15419}
15420
15421
15422/** Opcode 0xda 11/2. */
15423FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
15424{
15425 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
15426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15427
15428 IEM_MC_BEGIN(0, 1);
15429 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15430
15431 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15432 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15433
15434 IEM_MC_PREPARE_FPU_USAGE();
15435 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15436 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15437 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15438 IEM_MC_ENDIF();
15439 IEM_MC_UPDATE_FPU_OPCODE_IP();
15440 IEM_MC_ELSE()
15441 IEM_MC_FPU_STACK_UNDERFLOW(0);
15442 IEM_MC_ENDIF();
15443 IEM_MC_ADVANCE_RIP();
15444
15445 IEM_MC_END();
15446 return VINF_SUCCESS;
15447}
15448
15449
15450/** Opcode 0xda 11/3. */
15451FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
15452{
15453 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
15454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15455
15456 IEM_MC_BEGIN(0, 1);
15457 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15458
15459 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15460 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15461
15462 IEM_MC_PREPARE_FPU_USAGE();
15463 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15464 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
15465 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15466 IEM_MC_ENDIF();
15467 IEM_MC_UPDATE_FPU_OPCODE_IP();
15468 IEM_MC_ELSE()
15469 IEM_MC_FPU_STACK_UNDERFLOW(0);
15470 IEM_MC_ENDIF();
15471 IEM_MC_ADVANCE_RIP();
15472
15473 IEM_MC_END();
15474 return VINF_SUCCESS;
15475}
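
/* For reference, the 0xda FCMOVcc register forms test EFLAGS as follows
   (the 0xdb forms further down are the negated counterparts):
        /0 FCMOVB:  CF=1             /1 FCMOVE: ZF=1
        /2 FCMOVBE: CF=1 or ZF=1     /3 FCMOVU: PF=1 */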
15476
15477
15478/**
15479 * Common worker for FPU instructions working on ST0 and STn, only affecting
15480 * flags, and popping twice when done.
15481 *
15482 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15483 */
15484FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
15485{
15486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15487
15488 IEM_MC_BEGIN(3, 1);
15489 IEM_MC_LOCAL(uint16_t, u16Fsw);
15490 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15491 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15492 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15493
15494 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15495 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15496
15497 IEM_MC_PREPARE_FPU_USAGE();
15498 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
15499 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
15500 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
15501 IEM_MC_ELSE()
15502 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
15503 IEM_MC_ENDIF();
15504 IEM_MC_ADVANCE_RIP();
15505
15506 IEM_MC_END();
15507 return VINF_SUCCESS;
15508}
15509
15510
15511/** Opcode 0xda 0xe9. */
15512FNIEMOP_DEF(iemOp_fucompp)
15513{
15514 IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
15515 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
15516}
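
/* Unlike the ordered FCOM family, the unordered FUCOM compares only raise
   #IA for SNaN or empty operands; a QNaN merely yields the unordered
   C3/C2/C0 result. */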
15517
15518
15519/**
15520 * Common worker for FPU instructions working on ST0 and an m32i, and storing
15521 * the result in ST0.
15522 *
15523 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15524 */
15525FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
15526{
15527 IEM_MC_BEGIN(3, 3);
15528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15529 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15530 IEM_MC_LOCAL(int32_t, i32Val2);
15531 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15532 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15533 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15534
15535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15537
15538 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15539 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15540 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15541
15542 IEM_MC_PREPARE_FPU_USAGE();
15543 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15544 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
15545 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15546 IEM_MC_ELSE()
15547 IEM_MC_FPU_STACK_UNDERFLOW(0);
15548 IEM_MC_ENDIF();
15549 IEM_MC_ADVANCE_RIP();
15550
15551 IEM_MC_END();
15552 return VINF_SUCCESS;
15553}
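
/* The integer operand is fetched as a plain int32_t above; widening it to
   r80 is left to the assembly worker (iemAImpl_f*_r80_by_i32), which the
   six arithmetic memory forms below all funnel through. */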
15554
15555
15556/** Opcode 0xda !11/0. */
15557FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
15558{
15559 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
15560 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
15561}
15562
15563
15564/** Opcode 0xda !11/1. */
15565FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
15566{
15567 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
15568 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
15569}
15570
15571
15572/** Opcode 0xda !11/2. */
15573FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
15574{
15575 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
15576
15577 IEM_MC_BEGIN(3, 3);
15578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15579 IEM_MC_LOCAL(uint16_t, u16Fsw);
15580 IEM_MC_LOCAL(int32_t, i32Val2);
15581 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15582 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15583 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15584
15585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15587
15588 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15589 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15590 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15591
15592 IEM_MC_PREPARE_FPU_USAGE();
15593 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15594 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15595 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15596 IEM_MC_ELSE()
15597 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15598 IEM_MC_ENDIF();
15599 IEM_MC_ADVANCE_RIP();
15600
15601 IEM_MC_END();
15602 return VINF_SUCCESS;
15603}
15604
15605
15606/** Opcode 0xda !11/3. */
15607FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
15608{
15609 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
15610
15611 IEM_MC_BEGIN(3, 3);
15612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15613 IEM_MC_LOCAL(uint16_t, u16Fsw);
15614 IEM_MC_LOCAL(int32_t, i32Val2);
15615 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15616 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15617 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
15618
15619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15621
15622 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15623 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15624 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15625
15626 IEM_MC_PREPARE_FPU_USAGE();
15627 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15628 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
15629 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15630 IEM_MC_ELSE()
15631 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15632 IEM_MC_ENDIF();
15633 IEM_MC_ADVANCE_RIP();
15634
15635 IEM_MC_END();
15636 return VINF_SUCCESS;
15637}
15638
15639
15640/** Opcode 0xda !11/4. */
15641FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
15642{
15643 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
15644 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
15645}
15646
15647
15648/** Opcode 0xda !11/5. */
15649FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
15650{
15651 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
15652 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
15653}
15654
15655
15656/** Opcode 0xda !11/6. */
15657FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
15658{
15659 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
15660 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
15661}
15662
15663
15664/** Opcode 0xda !11/7. */
15665FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
15666{
15667 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
15668 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
15669}
15670
15671
15672/** Opcode 0xda. */
15673FNIEMOP_DEF(iemOp_EscF2)
15674{
15675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15676 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
15677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15678 {
15679 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15680 {
15681 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
15682 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
15683 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
15684 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
15685 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15686 case 5:
15687 if (bRm == 0xe9)
15688 return FNIEMOP_CALL(iemOp_fucompp);
15689 return IEMOP_RAISE_INVALID_OPCODE();
15690 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15691 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15692 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15693 }
15694 }
15695 else
15696 {
15697 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15698 {
15699 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
15700 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
15701 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
15702 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
15703 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
15704 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
15705 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
15706 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
15707 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15708 }
15709 }
15710}
15711
15712
15713/** Opcode 0xdb !11/0. */
15714FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
15715{
15716 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
15717
15718 IEM_MC_BEGIN(2, 3);
15719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15720 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15721 IEM_MC_LOCAL(int32_t, i32Val);
15722 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15723 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
15724
15725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15727
15728 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15729 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15730 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15731
15732 IEM_MC_PREPARE_FPU_USAGE();
15733 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15734 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
15735 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15736 IEM_MC_ELSE()
15737 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15738 IEM_MC_ENDIF();
15739 IEM_MC_ADVANCE_RIP();
15740
15741 IEM_MC_END();
15742 return VINF_SUCCESS;
15743}
15744
15745
15746/** Opcode 0xdb !11/1. */
15747FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
15748{
15749 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
15750 IEM_MC_BEGIN(3, 2);
15751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15752 IEM_MC_LOCAL(uint16_t, u16Fsw);
15753 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15754 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15755 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15756
15757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15759 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15760 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15761
15762 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15763 IEM_MC_PREPARE_FPU_USAGE();
15764 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15765 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15766 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15767 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15768 IEM_MC_ELSE()
15769 IEM_MC_IF_FCW_IM()
15770 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15771 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15772 IEM_MC_ENDIF();
15773 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15774 IEM_MC_ENDIF();
15775 IEM_MC_ADVANCE_RIP();
15776
15777 IEM_MC_END();
15778 return VINF_SUCCESS;
15779}
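
/* FISTTP always truncates towards zero regardless of FCW.RC, whereas
   FIST/FISTP below round according to FCW.RC. On a masked conversion fault
   the integer indefinite (INT32_MIN) is stored, as per the FCW.IM path
   above. */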
15780
15781
15782/** Opcode 0xdb !11/2. */
15783FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15784{
15785 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15786 IEM_MC_BEGIN(3, 2);
15787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15788 IEM_MC_LOCAL(uint16_t, u16Fsw);
15789 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15790 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15791 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15792
15793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15795 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15796 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15797
15798 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15799 IEM_MC_PREPARE_FPU_USAGE();
15800 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15801 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15802 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15803 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15804 IEM_MC_ELSE()
15805 IEM_MC_IF_FCW_IM()
15806 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15807 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15808 IEM_MC_ENDIF();
15809 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15810 IEM_MC_ENDIF();
15811 IEM_MC_ADVANCE_RIP();
15812
15813 IEM_MC_END();
15814 return VINF_SUCCESS;
15815}
15816
15817
15818/** Opcode 0xdb !11/3. */
15819FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
15820{
15821 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
15822 IEM_MC_BEGIN(3, 2);
15823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15824 IEM_MC_LOCAL(uint16_t, u16Fsw);
15825 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15826 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15827 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15828
15829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15831 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15832 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15833
15834 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15835 IEM_MC_PREPARE_FPU_USAGE();
15836 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15837 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15838 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15839 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15840 IEM_MC_ELSE()
15841 IEM_MC_IF_FCW_IM()
15842 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15843 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15844 IEM_MC_ENDIF();
15845 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15846 IEM_MC_ENDIF();
15847 IEM_MC_ADVANCE_RIP();
15848
15849 IEM_MC_END();
15850 return VINF_SUCCESS;
15851}
15852
15853
15854/** Opcode 0xdb !11/5. */
15855FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
15856{
15857 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
15858
15859 IEM_MC_BEGIN(2, 3);
15860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15861 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15862 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
15863 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15864 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
15865
15866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15868
15869 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15870 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15871 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15872
15873 IEM_MC_PREPARE_FPU_USAGE();
15874 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15875 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
15876 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15877 IEM_MC_ELSE()
15878 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15879 IEM_MC_ENDIF();
15880 IEM_MC_ADVANCE_RIP();
15881
15882 IEM_MC_END();
15883 return VINF_SUCCESS;
15884}
15885
15886
15887/** Opcode 0xdb !11/7. */
15888FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
15889{
15890 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
15891 IEM_MC_BEGIN(3, 2);
15892 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15893 IEM_MC_LOCAL(uint16_t, u16Fsw);
15894 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15895 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
15896 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15897
15898 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15900 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15901 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15902
15903 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15904 IEM_MC_PREPARE_FPU_USAGE();
15905 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15906 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
15907 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
15908 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15909 IEM_MC_ELSE()
15910 IEM_MC_IF_FCW_IM()
15911 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
15912 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
15913 IEM_MC_ENDIF();
15914 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15915 IEM_MC_ENDIF();
15916 IEM_MC_ADVANCE_RIP();
15917
15918 IEM_MC_END();
15919 return VINF_SUCCESS;
15920}
15921
15922
15923/** Opcode 0xdb 11/0. */
15924FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
15925{
15926 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
15927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15928
15929 IEM_MC_BEGIN(0, 1);
15930 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15931
15932 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15933 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15934
15935 IEM_MC_PREPARE_FPU_USAGE();
15936 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15937 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
15938 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15939 IEM_MC_ENDIF();
15940 IEM_MC_UPDATE_FPU_OPCODE_IP();
15941 IEM_MC_ELSE()
15942 IEM_MC_FPU_STACK_UNDERFLOW(0);
15943 IEM_MC_ENDIF();
15944 IEM_MC_ADVANCE_RIP();
15945
15946 IEM_MC_END();
15947 return VINF_SUCCESS;
15948}
15949
15950
15951/** Opcode 0xdb 11/1. */
15952FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
15953{
15954 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
15955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15956
15957 IEM_MC_BEGIN(0, 1);
15958 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15959
15960 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15961 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15962
15963 IEM_MC_PREPARE_FPU_USAGE();
15964 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15965 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
15966 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15967 IEM_MC_ENDIF();
15968 IEM_MC_UPDATE_FPU_OPCODE_IP();
15969 IEM_MC_ELSE()
15970 IEM_MC_FPU_STACK_UNDERFLOW(0);
15971 IEM_MC_ENDIF();
15972 IEM_MC_ADVANCE_RIP();
15973
15974 IEM_MC_END();
15975 return VINF_SUCCESS;
15976}
15977
15978
15979/** Opcode 0xdb 11/2. */
15980FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
15981{
15982 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
15983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15984
15985 IEM_MC_BEGIN(0, 1);
15986 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15987
15988 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15989 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15990
15991 IEM_MC_PREPARE_FPU_USAGE();
15992 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15993 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15994 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15995 IEM_MC_ENDIF();
15996 IEM_MC_UPDATE_FPU_OPCODE_IP();
15997 IEM_MC_ELSE()
15998 IEM_MC_FPU_STACK_UNDERFLOW(0);
15999 IEM_MC_ENDIF();
16000 IEM_MC_ADVANCE_RIP();
16001
16002 IEM_MC_END();
16003 return VINF_SUCCESS;
16004}
16005
16006
16007/** Opcode 0xdb 11/3. */
16008FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
16009{
16010 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
16011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16012
16013 IEM_MC_BEGIN(0, 1);
16014 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16015
16016 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16017 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16018
16019 IEM_MC_PREPARE_FPU_USAGE();
16020 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16021 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
16022 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16023 IEM_MC_ENDIF();
16024 IEM_MC_UPDATE_FPU_OPCODE_IP();
16025 IEM_MC_ELSE()
16026 IEM_MC_FPU_STACK_UNDERFLOW(0);
16027 IEM_MC_ENDIF();
16028 IEM_MC_ADVANCE_RIP();
16029
16030 IEM_MC_END();
16031 return VINF_SUCCESS;
16032}
16033
16034
16035/** Opcode 0xdb 0xe0. */
16036FNIEMOP_DEF(iemOp_fneni)
16037{
16038 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
16039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16040 IEM_MC_BEGIN(0,0);
16041 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16042 IEM_MC_ADVANCE_RIP();
16043 IEM_MC_END();
16044 return VINF_SUCCESS;
16045}
16046
16047
16048/** Opcode 0xdb 0xe1. */
16049FNIEMOP_DEF(iemOp_fndisi)
16050{
16051 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
16052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16053 IEM_MC_BEGIN(0,0);
16054 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16055 IEM_MC_ADVANCE_RIP();
16056 IEM_MC_END();
16057 return VINF_SUCCESS;
16058}
16059
16060
16061/** Opcode 0xdb 0xe2. */
16062FNIEMOP_DEF(iemOp_fnclex)
16063{
16064 IEMOP_MNEMONIC(fnclex, "fnclex");
16065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16066
16067 IEM_MC_BEGIN(0,0);
16068 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16069 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16070 IEM_MC_CLEAR_FSW_EX();
16071 IEM_MC_ADVANCE_RIP();
16072 IEM_MC_END();
16073 return VINF_SUCCESS;
16074}
16075
16076
16077/** Opcode 0xdb 0xe3. */
16078FNIEMOP_DEF(iemOp_fninit)
16079{
16080 IEMOP_MNEMONIC(fninit, "fninit");
16081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16082 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
16083}
16084
16085
16086/** Opcode 0xdb 0xe4. */
16087FNIEMOP_DEF(iemOp_fnsetpm)
16088{
16089 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
16090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16091 IEM_MC_BEGIN(0,0);
16092 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16093 IEM_MC_ADVANCE_RIP();
16094 IEM_MC_END();
16095 return VINF_SUCCESS;
16096}
16097
16098
16099/** Opcode 0xdb 0xe5. */
16100FNIEMOP_DEF(iemOp_frstpm)
16101{
16102 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
16103#if 0 /* #UDs on newer CPUs */
16104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16105 IEM_MC_BEGIN(0,0);
16106 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16107 IEM_MC_ADVANCE_RIP();
16108 IEM_MC_END();
16109 return VINF_SUCCESS;
16110#else
16111 return IEMOP_RAISE_INVALID_OPCODE();
16112#endif
16113}
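
/* FNENI/FNDISI gated the 8087 interrupt mask and FNSETPM switched the 80287
   into protected mode; on later FPUs these encodings decode but do nothing,
   hence the #NM-check-only bodies above. */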
16114
16115
16116/** Opcode 0xdb 11/5. */
16117FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
16118{
16119 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
16120 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
16121}
16122
16123
16124/** Opcode 0xdb 11/6. */
16125FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
16126{
16127 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
16128 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
16129}
16130
16131
16132/** Opcode 0xdb. */
16133FNIEMOP_DEF(iemOp_EscF3)
16134{
16135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16136 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
16137 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16138 {
16139 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16140 {
16141 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
16142 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
16143 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
16144 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
16145 case 4:
16146 switch (bRm)
16147 {
16148 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
16149 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
16150 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
16151 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
16152 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
16153 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
16154 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
16155 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
16156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16157 }
16158 break;
16159 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
16160 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
16161 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16162 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16163 }
16164 }
16165 else
16166 {
16167 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16168 {
16169 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
16170 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
16171 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
16172 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
16173 case 4: return IEMOP_RAISE_INVALID_OPCODE();
16174 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
16175 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16176 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
16177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16178 }
16179 }
16180}
16181
16182
16183/**
16184 * Common worker for FPU instructions working on STn and ST0, and storing the
16185 * result in STn unless IE, DE or ZE was raised.
16186 *
16187 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16188 */
16189FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
16190{
16191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16192
16193 IEM_MC_BEGIN(3, 1);
16194 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16195 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16196 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16197 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
16198
16199 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16200 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16201
16202 IEM_MC_PREPARE_FPU_USAGE();
16203 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
16204 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
16205 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16206 IEM_MC_ELSE()
16207 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16208 IEM_MC_ENDIF();
16209 IEM_MC_ADVANCE_RIP();
16210
16211 IEM_MC_END();
16212 return VINF_SUCCESS;
16213}
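
/* In the 0xdc register forms the destination is ST(i) rather than ST(0),
   i.e. STn = STn op ST0 (the reverse of the 0xd8 encodings), which is why
   the result is stored back via bRm & X86_MODRM_RM_MASK here. */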
16214
16215
16216/** Opcode 0xdc 11/0. */
16217FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
16218{
16219 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
16220 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
16221}
16222
16223
16224/** Opcode 0xdc 11/1. */
16225FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
16226{
16227 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
16228 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
16229}
16230
16231
16232/** Opcode 0xdc 11/4. */
16233FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
16234{
16235 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
16236 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
16237}
16238
16239
16240/** Opcode 0xdc 11/5. */
16241FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
16242{
16243 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
16244 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
16245}
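
/* Note the operand-order quirk of the 0xdc register encodings: /4 is FSUBR
   and /5 is FSUB (likewise /6 FDIVR and /7 FDIV), the opposite of the 0xdc
   memory forms dispatched at the bottom of iemOp_EscF4. */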
16246
16247
16248/** Opcode 0xdc 11/6. */
16249FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
16250{
16251 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
16252 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
16253}
16254
16255
16256/** Opcode 0xdc 11/7. */
16257FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
16258{
16259 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
16260 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
16261}
16262
16263
16264/**
16265 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16266 * memory operand, and storing the result in ST0.
16267 *
16268 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16269 */
16270FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
16271{
16272 IEM_MC_BEGIN(3, 3);
16273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16274 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16275 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16276 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16277 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16278 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16279
16280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16282 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16283 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16284
16285 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16286 IEM_MC_PREPARE_FPU_USAGE();
16287 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16288 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
16289 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16290 IEM_MC_ELSE()
16291 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16292 IEM_MC_ENDIF();
16293 IEM_MC_ADVANCE_RIP();
16294
16295 IEM_MC_END();
16296 return VINF_SUCCESS;
16297}
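
/* The r64 operand is fetched before IEM_MC_PREPARE_FPU_USAGE(), presumably
   so that a #PF on the memory operand is delivered before any FPU state is
   modified. */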
16298
16299
16300/** Opcode 0xdc !11/0. */
16301FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
16302{
16303 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
16304 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
16305}
16306
16307
16308/** Opcode 0xdc !11/1. */
16309FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
16310{
16311 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
16312 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
16313}
16314
16315
16316/** Opcode 0xdc !11/2. */
16317FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
16318{
16319 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
16320
16321 IEM_MC_BEGIN(3, 3);
16322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16323 IEM_MC_LOCAL(uint16_t, u16Fsw);
16324 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16325 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16326 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16327 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16328
16329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16331
16332 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16333 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16334 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16335
16336 IEM_MC_PREPARE_FPU_USAGE();
16337 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16338 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16339 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16340 IEM_MC_ELSE()
16341 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16342 IEM_MC_ENDIF();
16343 IEM_MC_ADVANCE_RIP();
16344
16345 IEM_MC_END();
16346 return VINF_SUCCESS;
16347}
16348
16349
16350/** Opcode 0xdc !11/3. */
16351FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
16352{
16353 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
16354
16355 IEM_MC_BEGIN(3, 3);
16356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16357 IEM_MC_LOCAL(uint16_t, u16Fsw);
16358 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
16359 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16360 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16361 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
16362
16363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16365
16366 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16367 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16368 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16369
16370 IEM_MC_PREPARE_FPU_USAGE();
16371 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16372 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
16373 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16374 IEM_MC_ELSE()
16375 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16376 IEM_MC_ENDIF();
16377 IEM_MC_ADVANCE_RIP();
16378
16379 IEM_MC_END();
16380 return VINF_SUCCESS;
16381}
16382
16383
16384/** Opcode 0xdc !11/4. */
16385FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
16386{
16387 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
16388 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
16389}
16390
16391
16392/** Opcode 0xdc !11/5. */
16393FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
16394{
16395 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
16396 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
16397}
16398
16399
16400/** Opcode 0xdc !11/6. */
16401FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
16402{
16403 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
16404 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
16405}
16406
16407
16408/** Opcode 0xdc !11/7. */
16409FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
16410{
16411 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
16412 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
16413}
16414
16415
16416/** Opcode 0xdc. */
16417FNIEMOP_DEF(iemOp_EscF4)
16418{
16419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16420 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
16421 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16422 {
16423 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16424 {
16425 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
16426 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
16427 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
16428 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
16429 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
16430 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
16431 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
16432 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
16433 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16434 }
16435 }
16436 else
16437 {
16438 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16439 {
16440 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
16441 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
16442 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
16443 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
16444 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
16445 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
16446 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
16447 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
16448 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16449 }
16450 }
16451}
16452
16453
16454/** Opcode 0xdd !11/0.
16455 * @sa iemOp_fld_m32r */
16456FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
16457{
16458 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
16459
16460 IEM_MC_BEGIN(2, 3);
16461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16462 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16463 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
16464 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16465 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
16466
16467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16469 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16470 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16471
16472 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16473 IEM_MC_PREPARE_FPU_USAGE();
16474 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16475 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
16476 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16477 IEM_MC_ELSE()
16478 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16479 IEM_MC_ENDIF();
16480 IEM_MC_ADVANCE_RIP();
16481
16482 IEM_MC_END();
16483 return VINF_SUCCESS;
16484}
16485
16486
16487/** Opcode 0xdd !11/1. */
16488FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
16489{
16490 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
16491 IEM_MC_BEGIN(3, 2);
16492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16493 IEM_MC_LOCAL(uint16_t, u16Fsw);
16494 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16495 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16496 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16497
16498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16500 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16501 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16502
16503 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16504 IEM_MC_PREPARE_FPU_USAGE();
16505 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16506 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16507 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16508 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16509 IEM_MC_ELSE()
16510 IEM_MC_IF_FCW_IM()
16511 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16512 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16513 IEM_MC_ENDIF();
16514 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16515 IEM_MC_ENDIF();
16516 IEM_MC_ADVANCE_RIP();
16517
16518 IEM_MC_END();
16519 return VINF_SUCCESS;
16520}
16521
16522
16523/** Opcode 0xdd !11/2. */
16524FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
16525{
16526 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
16527 IEM_MC_BEGIN(3, 2);
16528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16529 IEM_MC_LOCAL(uint16_t, u16Fsw);
16530 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16531 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16532 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16533
16534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16536 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16537 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16538
16539 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16540 IEM_MC_PREPARE_FPU_USAGE();
16541 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16542 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16543 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16544 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16545 IEM_MC_ELSE()
16546 IEM_MC_IF_FCW_IM()
16547 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16548 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16549 IEM_MC_ENDIF();
16550 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16551 IEM_MC_ENDIF();
16552 IEM_MC_ADVANCE_RIP();
16553
16554 IEM_MC_END();
16555 return VINF_SUCCESS;
16556}
16559
16560
16561/** Opcode 0xdd !11/3. */
16562FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
16563{
16564 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
16565 IEM_MC_BEGIN(3, 2);
16566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16567 IEM_MC_LOCAL(uint16_t, u16Fsw);
16568 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16569 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
16570 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16571
16572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16574 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16575 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16576
16577 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16578 IEM_MC_PREPARE_FPU_USAGE();
16579 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16580 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
16581 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16582 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16583 IEM_MC_ELSE()
16584 IEM_MC_IF_FCW_IM()
16585 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
16586 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
16587 IEM_MC_ENDIF();
16588 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16589 IEM_MC_ENDIF();
16590 IEM_MC_ADVANCE_RIP();
16591
16592 IEM_MC_END();
16593 return VINF_SUCCESS;
16594}
16595
16596
16597/** Opcode 0xdd !11/4. */
16598FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
16599{
16600 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
16601 IEM_MC_BEGIN(3, 0);
16602 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16603 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16604 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
16605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16607 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16608 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16609 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16610 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
16611 IEM_MC_END();
16612 return VINF_SUCCESS;
16613}
16614
16615
16616/** Opcode 0xdd !11/6. */
16617FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
16618{
16619 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
16620 IEM_MC_BEGIN(3, 0);
16621 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
16622 IEM_MC_ARG(uint8_t, iEffSeg, 1);
16623 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
16624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16626 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16627 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16628 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
16629 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
16630 IEM_MC_END();
16631 return VINF_SUCCESS;
16632}
16633
16634
16635/** Opcode 0xdd !11/7. */
16636FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
16637{
16638 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
16639
16640 IEM_MC_BEGIN(0, 2);
16641 IEM_MC_LOCAL(uint16_t, u16Tmp);
16642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16643
16644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16646 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16647
16648 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16649 IEM_MC_FETCH_FSW(u16Tmp);
16650 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
16651 IEM_MC_ADVANCE_RIP();
16652
16653/** @todo Debug / drop a hint to the verifier that things may differ
16654 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
16655 * NT4SP1. (X86_FSW_PE) */
16656 IEM_MC_END();
16657 return VINF_SUCCESS;
16658}
16659
16660
16661/** Opcode 0xdd 11/0. */
16662FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
16663{
16664 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
16665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16666 /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
16667 unmodified. */
16668
16669 IEM_MC_BEGIN(0, 0);
16670
16671 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16672 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16673
16674 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16675 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16676 IEM_MC_UPDATE_FPU_OPCODE_IP();
16677
16678 IEM_MC_ADVANCE_RIP();
16679 IEM_MC_END();
16680 return VINF_SUCCESS;
16681}
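
/* FFREE only tags ST(i) as empty; TOP and the register contents are left
   alone, so there is no implicit pop (that would be the undocumented FFREEP
   encoding on 0xdf). */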
16682
16683
16684/** Opcode 0xdd 11/2. */
16685FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
16686{
16687 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
16688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16689
16690 IEM_MC_BEGIN(0, 2);
16691 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
16692 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16693 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16694 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16695
16696 IEM_MC_PREPARE_FPU_USAGE();
16697 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16698 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
16699 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
16700 IEM_MC_ELSE()
16701 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
16702 IEM_MC_ENDIF();
16703
16704 IEM_MC_ADVANCE_RIP();
16705 IEM_MC_END();
16706 return VINF_SUCCESS;
16707}
16708
16709
16710/** Opcode 0xdd 11/4. */
16711FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
16712{
16713 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
16714 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
16715}
16716
16717
16718/** Opcode 0xdd 11/5. */
16719FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
16720{
16721 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
16722 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
16723}
16724
16725
16726/** Opcode 0xdd. */
16727FNIEMOP_DEF(iemOp_EscF5)
16728{
16729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16730 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16731 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16732 {
16733 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16734 {
16735 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16736 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of FXCH ST(i). */
16737 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16738 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16739 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
16740 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16741 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16742 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16744 }
16745 }
16746 else
16747 {
16748 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16749 {
16750 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16751 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16752 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16753 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16754 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16755 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16756 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16757 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16759 }
16760 }
16761}
16762
16763
16764/** Opcode 0xde 11/0. */
16765FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
16766{
16767 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
16768 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
16769}
16770
16771
16772/** Opcode 0xde 11/1. */
16773FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
16774{
16775 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
16776 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
16777}
16778
16779
16780/** Opcode 0xde 0xd9. */
16781FNIEMOP_DEF(iemOp_fcompp)
16782{
16783 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
16784 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
16785}
16786
16787
16788/** Opcode 0xde 11/4. */
16789FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16790{
16791 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16792 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16793}
16794
16795
16796/** Opcode 0xde 11/5. */
16797FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16798{
16799 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16800 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16801}
16802
16803
16804/** Opcode 0xde 11/6. */
16805FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16806{
16807 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16808 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16809}
16810
16811
16812/** Opcode 0xde 11/7. */
16813FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16814{
16815 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16816 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16817}
16818
16819
16820/**
16821 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16822 * the result in ST0.
16823 *
16824 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16825 */
16826FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16827{
16828 IEM_MC_BEGIN(3, 3);
16829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16830 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16831 IEM_MC_LOCAL(int16_t, i16Val2);
16832 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16833 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16834 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16835
16836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16838
16839 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16840 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16841 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16842
16843 IEM_MC_PREPARE_FPU_USAGE();
16844 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16845 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16846 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16847 IEM_MC_ELSE()
16848 IEM_MC_FPU_STACK_UNDERFLOW(0);
16849 IEM_MC_ENDIF();
16850 IEM_MC_ADVANCE_RIP();
16851
16852 IEM_MC_END();
16853 return VINF_SUCCESS;
16854}
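
/* Same pattern as the m32i helper further up, just at 16-bit width; the
   widening to r80 again happens inside the assembly worker. */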
16855
16856
16857/** Opcode 0xde !11/0. */
16858FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16859{
16860 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
16861 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16862}
16863
16864
16865/** Opcode 0xde !11/1. */
16866FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16867{
16868 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
16869 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16870}
16871
16872
16873/** Opcode 0xde !11/2. */
16874FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16875{
16876 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
16877
16878 IEM_MC_BEGIN(3, 3);
16879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16880 IEM_MC_LOCAL(uint16_t, u16Fsw);
16881 IEM_MC_LOCAL(int16_t, i16Val2);
16882 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16883 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16884 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16885
16886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16888
16889 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16890 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16891 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16892
16893 IEM_MC_PREPARE_FPU_USAGE();
16894 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16895 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16896 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16897 IEM_MC_ELSE()
16898 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16899 IEM_MC_ENDIF();
16900 IEM_MC_ADVANCE_RIP();
16901
16902 IEM_MC_END();
16903 return VINF_SUCCESS;
16904}
16905
16906
16907/** Opcode 0xde !11/3. */
16908FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16909{
16910 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
16911
16912 IEM_MC_BEGIN(3, 3);
16913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16914 IEM_MC_LOCAL(uint16_t, u16Fsw);
16915 IEM_MC_LOCAL(int16_t, i16Val2);
16916 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16917 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16918 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16919
16920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16922
16923 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16924 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16925 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16926
16927 IEM_MC_PREPARE_FPU_USAGE();
16928 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16929 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16930 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16931 IEM_MC_ELSE()
16932 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16933 IEM_MC_ENDIF();
16934 IEM_MC_ADVANCE_RIP();
16935
16936 IEM_MC_END();
16937 return VINF_SUCCESS;
16938}
16939
16940
16941/** Opcode 0xde !11/4. */
16942FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16943{
16944 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
16945 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16946}
16947
16948
16949/** Opcode 0xde !11/5. */
16950FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16951{
16952 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
16953 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16954}
16955
16956
16957/** Opcode 0xde !11/6. */
16958FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16959{
16960 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
16961 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16962}
16963
16964
16965/** Opcode 0xde !11/7. */
16966FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16967{
16968 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
16969 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16970}
16971
16972
16973/** Opcode 0xde. */
16974FNIEMOP_DEF(iemOp_EscF6)
16975{
16976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16977 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16978 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16979 {
16980 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16981 {
16982 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16983 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16984 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
16985 case 3: if (bRm == 0xd9)
16986 return FNIEMOP_CALL(iemOp_fcompp);
16987 return IEMOP_RAISE_INVALID_OPCODE();
16988 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16989 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16990 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16991 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16992 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16993 }
16994 }
16995 else
16996 {
16997 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16998 {
16999 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
17000 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
17001 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
17002 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
17003 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
17004 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
17005 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
17006 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
17007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17008 }
17009 }
17010}
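
/*
 * Note on the dispatch above: for the FPU escape opcodes the ModRM byte picks
 * register forms (mod == 3) vs. memory forms, with the reg field selecting
 * the operation in either case.  Field extraction in isolation (illustrative
 * only; the decoder uses the X86_MODRM_* masks inline as seen above):
 */
#if 0 /* illustrative sketch */
static void iemSketch_SplitModRm(uint8_t bRm)
{
    uint8_t const iMod = (bRm & X86_MODRM_MOD_MASK) >> X86_MODRM_MOD_SHIFT;  /* 3 = register operand */
    uint8_t const iReg = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* operation / opcode extension */
    uint8_t const iRm  = bRm & X86_MODRM_RM_MASK;                            /* ST(i) when iMod == 3 */
    NOREF(iMod); NOREF(iReg); NOREF(iRm);
}
#endif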
17011
17012
17013/** Opcode 0xdf 11/0.
17014 * Undocumented instruction, assumed to work like ffree + fincstp. */
17015FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
17016{
17017 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
17018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17019
17020 IEM_MC_BEGIN(0, 0);
17021
17022 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17023 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17024
17025 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
17026 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
17027 IEM_MC_FPU_STACK_INC_TOP();
17028 IEM_MC_UPDATE_FPU_OPCODE_IP();
17029
17030 IEM_MC_ADVANCE_RIP();
17031 IEM_MC_END();
17032 return VINF_SUCCESS;
17033}
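
/*
 * The TOP field freed/incremented above lives in FSW bits 11 thru 13 and
 * wraps modulo 8.  A sketch of the increment using the iprt/x86.h constants
 * (illustrative only):
 */
#if 0 /* illustrative sketch */
static uint16_t iemSketch_FswIncTop(uint16_t u16Fsw)
{
    uint16_t uTop = (u16Fsw >> X86_FSW_TOP_SHIFT) & X86_FSW_TOP_SMASK;
    uTop = (uTop + 1) & X86_FSW_TOP_SMASK; /* 7 wraps around to 0 */
    return (u16Fsw & ~X86_FSW_TOP_MASK) | (uTop << X86_FSW_TOP_SHIFT);
}
#endif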
17034
17035
17036/** Opcode 0xdf 0xe0. */
17037FNIEMOP_DEF(iemOp_fnstsw_ax)
17038{
17039 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
17040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17041
17042 IEM_MC_BEGIN(0, 1);
17043 IEM_MC_LOCAL(uint16_t, u16Tmp);
17044 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17045 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
17046 IEM_MC_FETCH_FSW(u16Tmp);
17047 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
17048 IEM_MC_ADVANCE_RIP();
17049 IEM_MC_END();
17050 return VINF_SUCCESS;
17051}
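
/*
 * Note that FNSTSW is a no-wait instruction: only #NM is checked above and no
 * pending FPU exception is raised before the status word is read, which is
 * why there is no IEM_MC_MAYBE_RAISE_FPU_XCPT here unlike in the arithmetic
 * instructions.
 */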
17052
17053
17054/** Opcode 0xdf 11/5. */
17055FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
17056{
17057 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
17058 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17059}
17060
17061
17062/** Opcode 0xdf 11/6. */
17063FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
17064{
17065 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
17066 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
17067}
17068
17069
17070/** Opcode 0xdf !11/0. */
17071FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
17072{
17073 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
17074
17075 IEM_MC_BEGIN(2, 3);
17076 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17077 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17078 IEM_MC_LOCAL(int16_t, i16Val);
17079 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17080 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
17081
17082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17084
17085 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17086 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17087 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17088
17089 IEM_MC_PREPARE_FPU_USAGE();
17090 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17091 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
17092 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17093 IEM_MC_ELSE()
17094 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17095 IEM_MC_ENDIF();
17096 IEM_MC_ADVANCE_RIP();
17097
17098 IEM_MC_END();
17099 return VINF_SUCCESS;
17100}
17101
17102
17103/** Opcode 0xdf !11/1. */
17104FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
17105{
17106 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
17107 IEM_MC_BEGIN(3, 2);
17108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17109 IEM_MC_LOCAL(uint16_t, u16Fsw);
17110 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17111 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17112 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17113
17114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17116 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17117 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17118
17119 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17120 IEM_MC_PREPARE_FPU_USAGE();
17121 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17122 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17123 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17124 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17125 IEM_MC_ELSE()
17126 IEM_MC_IF_FCW_IM()
17127 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17128 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17129 IEM_MC_ENDIF();
17130 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17131 IEM_MC_ENDIF();
17132 IEM_MC_ADVANCE_RIP();
17133
17134 IEM_MC_END();
17135 return VINF_SUCCESS;
17136}
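
/*
 * Unlike FIST/FISTP, FISTTP always truncates towards zero regardless of
 * FCW.RC.  When the value cannot be represented and the invalid-operation
 * exception is masked (FCW.IM), the integer indefinite value is stored, as
 * the IEM_MC_IF_FCW_IM() path above shows for the 16-bit case.  Roughly
 * (illustrative sketch, the real conversion lives in the assembly worker):
 */
#if 0 /* illustrative sketch */
static int16_t iemSketch_FisttToI16(long double lrdSt0)
{
    if (lrdSt0 >= -32768.0L && lrdSt0 < 32768.0L)
        return (int16_t)lrdSt0; /* C casts truncate towards zero */
    return INT16_MIN;           /* 0x8000 = integer indefinite */
}
#endif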
17137
17138
17139/** Opcode 0xdf !11/2. */
17140FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
17141{
17142 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
17143 IEM_MC_BEGIN(3, 2);
17144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17145 IEM_MC_LOCAL(uint16_t, u16Fsw);
17146 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17147 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17148 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17149
17150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17152 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17153 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17154
17155 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17156 IEM_MC_PREPARE_FPU_USAGE();
17157 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17158 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17159 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17160 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17161 IEM_MC_ELSE()
17162 IEM_MC_IF_FCW_IM()
17163 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17164 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17165 IEM_MC_ENDIF();
17166 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17167 IEM_MC_ENDIF();
17168 IEM_MC_ADVANCE_RIP();
17169
17170 IEM_MC_END();
17171 return VINF_SUCCESS;
17172}
17173
17174
17175/** Opcode 0xdf !11/3. */
17176FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
17177{
17178 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
17179 IEM_MC_BEGIN(3, 2);
17180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17181 IEM_MC_LOCAL(uint16_t, u16Fsw);
17182 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17183 IEM_MC_ARG(int16_t *, pi16Dst, 1);
17184 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17185
17186 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17188 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17189 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17190
17191 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17192 IEM_MC_PREPARE_FPU_USAGE();
17193 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17194 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
17195 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
17196 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17197 IEM_MC_ELSE()
17198 IEM_MC_IF_FCW_IM()
17199 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
17200 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
17201 IEM_MC_ENDIF();
17202 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17203 IEM_MC_ENDIF();
17204 IEM_MC_ADVANCE_RIP();
17205
17206 IEM_MC_END();
17207 return VINF_SUCCESS;
17208}
17209
17210
17211/** Opcode 0xdf !11/4. */
17212FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17213
17214
17215/** Opcode 0xdf !11/5. */
17216FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
17217{
17218 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
17219
17220 IEM_MC_BEGIN(2, 3);
17221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17222 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17223 IEM_MC_LOCAL(int64_t, i64Val);
17224 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17225 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
17226
17227 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17229
17230 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17231 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17232 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17233
17234 IEM_MC_PREPARE_FPU_USAGE();
17235 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17236 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
17237 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17238 IEM_MC_ELSE()
17239 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17240 IEM_MC_ENDIF();
17241 IEM_MC_ADVANCE_RIP();
17242
17243 IEM_MC_END();
17244 return VINF_SUCCESS;
17245}
17246
17247
17248/** Opcode 0xdf !11/6. */
17249FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17250
17251
17252/** Opcode 0xdf !11/7. */
17253FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
17254{
17255 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
17256 IEM_MC_BEGIN(3, 2);
17257 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17258 IEM_MC_LOCAL(uint16_t, u16Fsw);
17259 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17260 IEM_MC_ARG(int64_t *, pi64Dst, 1);
17261 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17262
17263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17265 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17266 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17267
17268 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17269 IEM_MC_PREPARE_FPU_USAGE();
17270 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17271 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
17272 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
17273 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17274 IEM_MC_ELSE()
17275 IEM_MC_IF_FCW_IM()
17276 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
17277 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
17278 IEM_MC_ENDIF();
17279 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17280 IEM_MC_ENDIF();
17281 IEM_MC_ADVANCE_RIP();
17282
17283 IEM_MC_END();
17284 return VINF_SUCCESS;
17285}
17286
17287
17288/** Opcode 0xdf. */
17289FNIEMOP_DEF(iemOp_EscF7)
17290{
17291 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7); /* record the FPU opcode for FOP updates, like iemOp_EscF6 does */
17292 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17293 {
17294 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17295 {
17296 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
17297 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on Intel. */
17298 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on Intel. */
17299 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on Intel. */
17300 case 4: if (bRm == 0xe0)
17301 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17302 return IEMOP_RAISE_INVALID_OPCODE();
17303 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17304 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17305 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17306 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17307 }
17308 }
17309 else
17310 {
17311 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17312 {
17313 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17314 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17315 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17316 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17317 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17318 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17319 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17320 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17322 }
17323 }
17324}
17325
17326
17327/** Opcode 0xe0. */
17328FNIEMOP_DEF(iemOp_loopne_Jb)
17329{
17330 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
17331 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17333 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17334
17335 switch (pVCpu->iem.s.enmEffAddrMode)
17336 {
17337 case IEMMODE_16BIT:
17338 IEM_MC_BEGIN(0,0);
17339 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17340 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17341 IEM_MC_REL_JMP_S8(i8Imm);
17342 } IEM_MC_ELSE() {
17343 IEM_MC_ADVANCE_RIP();
17344 } IEM_MC_ENDIF();
17345 IEM_MC_END();
17346 return VINF_SUCCESS;
17347
17348 case IEMMODE_32BIT:
17349 IEM_MC_BEGIN(0,0);
17350 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17351 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17352 IEM_MC_REL_JMP_S8(i8Imm);
17353 } IEM_MC_ELSE() {
17354 IEM_MC_ADVANCE_RIP();
17355 } IEM_MC_ENDIF();
17356 IEM_MC_END();
17357 return VINF_SUCCESS;
17358
17359 case IEMMODE_64BIT:
17360 IEM_MC_BEGIN(0,0);
17361 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17362 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17363 IEM_MC_REL_JMP_S8(i8Imm);
17364 } IEM_MC_ELSE() {
17365 IEM_MC_ADVANCE_RIP();
17366 } IEM_MC_ENDIF();
17367 IEM_MC_END();
17368 return VINF_SUCCESS;
17369
17370 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17371 }
17372}
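
/*
 * LOOPcc semantics: the counter register width follows the *address* size
 * (CX/ECX/RCX), the decrement does not touch EFLAGS, and the branch is taken
 * while the counter is non-zero and the ZF condition holds.  For LOOPNE:
 */
#if 0 /* illustrative sketch */
static bool iemSketch_LoopNeTaken(uint64_t *puCounter, uint32_t fEFlags)
{
    *puCounter -= 1; /* flags are not modified by the decrement */
    return *puCounter != 0 && !(fEFlags & X86_EFL_ZF);
}
#endif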
17373
17374
17375/** Opcode 0xe1. */
17376FNIEMOP_DEF(iemOp_loope_Jb)
17377{
17378 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
17379 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17381 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17382
17383 switch (pVCpu->iem.s.enmEffAddrMode)
17384 {
17385 case IEMMODE_16BIT:
17386 IEM_MC_BEGIN(0,0);
17387 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17388 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17389 IEM_MC_REL_JMP_S8(i8Imm);
17390 } IEM_MC_ELSE() {
17391 IEM_MC_ADVANCE_RIP();
17392 } IEM_MC_ENDIF();
17393 IEM_MC_END();
17394 return VINF_SUCCESS;
17395
17396 case IEMMODE_32BIT:
17397 IEM_MC_BEGIN(0,0);
17398 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17399 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17400 IEM_MC_REL_JMP_S8(i8Imm);
17401 } IEM_MC_ELSE() {
17402 IEM_MC_ADVANCE_RIP();
17403 } IEM_MC_ENDIF();
17404 IEM_MC_END();
17405 return VINF_SUCCESS;
17406
17407 case IEMMODE_64BIT:
17408 IEM_MC_BEGIN(0,0);
17409 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17410 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17411 IEM_MC_REL_JMP_S8(i8Imm);
17412 } IEM_MC_ELSE() {
17413 IEM_MC_ADVANCE_RIP();
17414 } IEM_MC_ENDIF();
17415 IEM_MC_END();
17416 return VINF_SUCCESS;
17417
17418 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17419 }
17420}
17421
17422
17423/** Opcode 0xe2. */
17424FNIEMOP_DEF(iemOp_loop_Jb)
17425{
17426 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
17427 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17429 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17430
17431 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
17432 * using the 32-bit operand size override. How can that be restarted? See
17433 * weird pseudo code in intel manual. */
17434 switch (pVCpu->iem.s.enmEffAddrMode)
17435 {
17436 case IEMMODE_16BIT:
17437 IEM_MC_BEGIN(0,0);
17438 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17439 {
17440 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17441 IEM_MC_IF_CX_IS_NZ() {
17442 IEM_MC_REL_JMP_S8(i8Imm);
17443 } IEM_MC_ELSE() {
17444 IEM_MC_ADVANCE_RIP();
17445 } IEM_MC_ENDIF();
17446 }
17447 else
17448 {
17449 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
17450 IEM_MC_ADVANCE_RIP();
17451 }
17452 IEM_MC_END();
17453 return VINF_SUCCESS;
17454
17455 case IEMMODE_32BIT:
17456 IEM_MC_BEGIN(0,0);
17457 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17458 {
17459 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17460 IEM_MC_IF_ECX_IS_NZ() {
17461 IEM_MC_REL_JMP_S8(i8Imm);
17462 } IEM_MC_ELSE() {
17463 IEM_MC_ADVANCE_RIP();
17464 } IEM_MC_ENDIF();
17465 }
17466 else
17467 {
17468 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
17469 IEM_MC_ADVANCE_RIP();
17470 }
17471 IEM_MC_END();
17472 return VINF_SUCCESS;
17473
17474 case IEMMODE_64BIT:
17475 IEM_MC_BEGIN(0,0);
17476 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17477 {
17478 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17479 IEM_MC_IF_RCX_IS_NZ() {
17480 IEM_MC_REL_JMP_S8(i8Imm);
17481 } IEM_MC_ELSE() {
17482 IEM_MC_ADVANCE_RIP();
17483 } IEM_MC_ENDIF();
17484 }
17485 else
17486 {
17487 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
17488 IEM_MC_ADVANCE_RIP();
17489 }
17490 IEM_MC_END();
17491 return VINF_SUCCESS;
17492
17493 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17494 }
17495}
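
/*
 * The IEM_GET_INSTR_LEN comparison above detects 'loop $', a branch to the
 * instruction itself (i8Imm == -cbInstr).  Rather than iterating rCX times,
 * the counter is simply zeroed and execution moves on, which is all such
 * calibration/delay loops can observe anyway.
 */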
17496
17497
17498/** Opcode 0xe3. */
17499FNIEMOP_DEF(iemOp_jecxz_Jb)
17500{
17501 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
17502 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17504 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17505
17506 switch (pVCpu->iem.s.enmEffAddrMode)
17507 {
17508 case IEMMODE_16BIT:
17509 IEM_MC_BEGIN(0,0);
17510 IEM_MC_IF_CX_IS_NZ() {
17511 IEM_MC_ADVANCE_RIP();
17512 } IEM_MC_ELSE() {
17513 IEM_MC_REL_JMP_S8(i8Imm);
17514 } IEM_MC_ENDIF();
17515 IEM_MC_END();
17516 return VINF_SUCCESS;
17517
17518 case IEMMODE_32BIT:
17519 IEM_MC_BEGIN(0,0);
17520 IEM_MC_IF_ECX_IS_NZ() {
17521 IEM_MC_ADVANCE_RIP();
17522 } IEM_MC_ELSE() {
17523 IEM_MC_REL_JMP_S8(i8Imm);
17524 } IEM_MC_ENDIF();
17525 IEM_MC_END();
17526 return VINF_SUCCESS;
17527
17528 case IEMMODE_64BIT:
17529 IEM_MC_BEGIN(0,0);
17530 IEM_MC_IF_RCX_IS_NZ() {
17531 IEM_MC_ADVANCE_RIP();
17532 } IEM_MC_ELSE() {
17533 IEM_MC_REL_JMP_S8(i8Imm);
17534 } IEM_MC_ENDIF();
17535 IEM_MC_END();
17536 return VINF_SUCCESS;
17537
17538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17539 }
17540}
17541
17542
17543/** Opcode 0xe4. */
17544FNIEMOP_DEF(iemOp_in_AL_Ib)
17545{
17546 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
17547 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17549 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
17550}
17551
17552
17553/** Opcode 0xe5. */
17554FNIEMOP_DEF(iemOp_in_eAX_Ib)
17555{
17556 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
17557 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17559 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17560}
17561
17562
17563/** Opcode 0xe6. */
17564FNIEMOP_DEF(iemOp_out_Ib_AL)
17565{
17566 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
17567 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17569 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
17570}
17571
17572
17573/** Opcode 0xe7. */
17574FNIEMOP_DEF(iemOp_out_Ib_eAX)
17575{
17576 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
17577 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17579 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17580}
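
/*
 * For all four immediate-port forms above, the second argument to
 * iemCImpl_in/iemCImpl_out is the access width in bytes: 1 for AL, and 2 or 4
 * for AX/EAX depending on the effective operand size.
 */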
17581
17582
17583/** Opcode 0xe8. */
17584FNIEMOP_DEF(iemOp_call_Jv)
17585{
17586 IEMOP_MNEMONIC(call_Jv, "call Jv");
17587 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17588 switch (pVCpu->iem.s.enmEffOpSize)
17589 {
17590 case IEMMODE_16BIT:
17591 {
17592 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17593 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
17594 }
17595
17596 case IEMMODE_32BIT:
17597 {
17598 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17599 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
17600 }
17601
17602 case IEMMODE_64BIT:
17603 {
17604 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17605 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
17606 }
17607
17608 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17609 }
17610}
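
/*
 * In 64-bit mode there is no CALL rel64; the rel32 is sign-extended to 64
 * bits (IEM_OPCODE_GET_NEXT_S32_SX_U64 above) and added to the RIP of the
 * next instruction.  Sketch of the target calculation (illustrative only):
 */
#if 0 /* illustrative sketch */
static uint64_t iemSketch_CallRel32Target(uint64_t uRipNext, int32_t i32Disp)
{
    return uRipNext + (int64_t)i32Disp;
}
#endif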
17611
17612
17613/** Opcode 0xe9. */
17614FNIEMOP_DEF(iemOp_jmp_Jv)
17615{
17616 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
17617 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17618 switch (pVCpu->iem.s.enmEffOpSize)
17619 {
17620 case IEMMODE_16BIT:
17621 {
17622 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
17623 IEM_MC_BEGIN(0, 0);
17624 IEM_MC_REL_JMP_S16(i16Imm);
17625 IEM_MC_END();
17626 return VINF_SUCCESS;
17627 }
17628
17629 case IEMMODE_64BIT:
17630 case IEMMODE_32BIT:
17631 {
17632 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
17633 IEM_MC_BEGIN(0, 0);
17634 IEM_MC_REL_JMP_S32(i32Imm);
17635 IEM_MC_END();
17636 return VINF_SUCCESS;
17637 }
17638
17639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17640 }
17641}
17642
17643
17644/** Opcode 0xea. */
17645FNIEMOP_DEF(iemOp_jmp_Ap)
17646{
17647 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
17648 IEMOP_HLP_NO_64BIT();
17649
17650 /* Decode the far pointer address and pass it on to the far jump C implementation. */
17651 uint32_t offSeg;
17652 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
17653 IEM_OPCODE_GET_NEXT_U32(&offSeg);
17654 else
17655 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
17656 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
17657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17658 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
17659}
17660
17661
17662/** Opcode 0xeb. */
17663FNIEMOP_DEF(iemOp_jmp_Jb)
17664{
17665 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17666 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17668 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17669
17670 IEM_MC_BEGIN(0, 0);
17671 IEM_MC_REL_JMP_S8(i8Imm);
17672 IEM_MC_END();
17673 return VINF_SUCCESS;
17674}
17675
17676
17677/** Opcode 0xec. */
17678FNIEMOP_DEF(iemOp_in_AL_DX)
17679{
17680 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17682 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17683}
17684
17685
17686/** Opcode 0xed. */
17687FNIEMOP_DEF(iemOp_eAX_DX)
17688{
17689 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17691 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17692}
17693
17694
17695/** Opcode 0xee. */
17696FNIEMOP_DEF(iemOp_out_DX_AL)
17697{
17698 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17700 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17701}
17702
17703
17704/** Opcode 0xef. */
17705FNIEMOP_DEF(iemOp_out_DX_eAX)
17706{
17707 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17709 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17710}
17711
17712
17713/** Opcode 0xf0. */
17714FNIEMOP_DEF(iemOp_lock)
17715{
17716 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17717 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17718
17719 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17720 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17721}
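
/*
 * Like the other prefix bytes, LOCK just records itself in fPrefixes and then
 * dispatches the next opcode byte through g_apfnOneByteMap, so an arbitrary
 * run of prefixes is handled by simple recursion until a real instruction is
 * reached.
 */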
17722
17723
17724/** Opcode 0xf1. */
17725FNIEMOP_DEF(iemOp_int_1)
17726{
17727 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17728 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17729 /** @todo testcase! */
17730 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17731}
17732
17733
17734/** Opcode 0xf2. */
17735FNIEMOP_DEF(iemOp_repne)
17736{
17737 /* This overrides any previous REPE prefix. */
17738 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17739 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17740 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17741
17742 /* For the 4 entry opcode tables, REPNZ overrides any previous
17743 REPZ and operand size prefixes. */
17744 pVCpu->iem.s.idxPrefix = 3;
17745
17746 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17747 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17748}
17749
17750
17751/** Opcode 0xf3. */
17752FNIEMOP_DEF(iemOp_repe)
17753{
17754 /* This overrides any previous REPNE prefix. */
17755 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17756 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17757 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17758
17759 /* For the 4 entry opcode tables, REPZ overrides any previous
17760 REPNZ and operand size prefixes. */
17761 pVCpu->iem.s.idxPrefix = 2;
17762
17763 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17764 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17765}
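
/*
 * idxPrefix selects the column of the 4-entry opcode tables used by later
 * maps.  Judging from the two handlers above, the encoding appears to be
 * 0 = no prefix, 1 = 0x66, 2 = 0xf3 (REPZ) and 3 = 0xf2 (REPNZ), the last
 * such prefix winning; the 0x66 value is inferred, not visible here.
 */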
17766
17767
17768/** Opcode 0xf4. */
17769FNIEMOP_DEF(iemOp_hlt)
17770{
17771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17772 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17773}
17774
17775
17776/** Opcode 0xf5. */
17777FNIEMOP_DEF(iemOp_cmc)
17778{
17779 IEMOP_MNEMONIC(cmc, "cmc");
17780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17781 IEM_MC_BEGIN(0, 0);
17782 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
17783 IEM_MC_ADVANCE_RIP();
17784 IEM_MC_END();
17785 return VINF_SUCCESS;
17786}
17787
17788
17789/**
17790 * Common implementation of 'inc/dec/not/neg Eb'.
17791 *
17792 * @param bRm The RM byte.
17793 * @param pImpl The instruction implementation.
17794 */
17795FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17796{
17797 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17798 {
17799 /* register access */
17800 IEM_MC_BEGIN(2, 0);
17801 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17802 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17803 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17804 IEM_MC_REF_EFLAGS(pEFlags);
17805 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17806 IEM_MC_ADVANCE_RIP();
17807 IEM_MC_END();
17808 }
17809 else
17810 {
17811 /* memory access. */
17812 IEM_MC_BEGIN(2, 2);
17813 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17814 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17816
17817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17818 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17819 IEM_MC_FETCH_EFLAGS(EFlags);
17820 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17821 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17822 else
17823 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17824
17825 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17826 IEM_MC_COMMIT_EFLAGS(EFlags);
17827 IEM_MC_ADVANCE_RIP();
17828 IEM_MC_END();
17829 }
17830 return VINF_SUCCESS;
17831}
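
/*
 * Note the LOCK handling above: the memory form dispatches to pfnLockedU8
 * when IEM_OP_PRF_LOCK is set, while for a register destination the prefix
 * is architecturally invalid, LOCK requiring a memory destination.
 */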
17832
17833
17834/**
17835 * Common implementation of 'inc/dec/not/neg Ev'.
17836 *
17837 * @param bRm The RM byte.
17838 * @param pImpl The instruction implementation.
17839 */
17840FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17841{
17842 /* Registers are handled by a common worker. */
17843 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17844 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17845
17846 /* Memory we do here. */
17847 switch (pVCpu->iem.s.enmEffOpSize)
17848 {
17849 case IEMMODE_16BIT:
17850 IEM_MC_BEGIN(2, 2);
17851 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17852 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17853 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17854
17855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17856 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17857 IEM_MC_FETCH_EFLAGS(EFlags);
17858 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17859 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17860 else
17861 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17862
17863 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17864 IEM_MC_COMMIT_EFLAGS(EFlags);
17865 IEM_MC_ADVANCE_RIP();
17866 IEM_MC_END();
17867 return VINF_SUCCESS;
17868
17869 case IEMMODE_32BIT:
17870 IEM_MC_BEGIN(2, 2);
17871 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17872 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17874
17875 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17876 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17877 IEM_MC_FETCH_EFLAGS(EFlags);
17878 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17879 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17880 else
17881 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17882
17883 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17884 IEM_MC_COMMIT_EFLAGS(EFlags);
17885 IEM_MC_ADVANCE_RIP();
17886 IEM_MC_END();
17887 return VINF_SUCCESS;
17888
17889 case IEMMODE_64BIT:
17890 IEM_MC_BEGIN(2, 2);
17891 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17892 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17893 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17894
17895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17896 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17897 IEM_MC_FETCH_EFLAGS(EFlags);
17898 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17899 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17900 else
17901 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17902
17903 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17904 IEM_MC_COMMIT_EFLAGS(EFlags);
17905 IEM_MC_ADVANCE_RIP();
17906 IEM_MC_END();
17907 return VINF_SUCCESS;
17908
17909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17910 }
17911}
17912
17913
17914/** Opcode 0xf6 /0. */
17915FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17916{
17917 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
17918 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17919
17920 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17921 {
17922 /* register access */
17923 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17925
17926 IEM_MC_BEGIN(3, 0);
17927 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17928 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17929 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17930 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17931 IEM_MC_REF_EFLAGS(pEFlags);
17932 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17933 IEM_MC_ADVANCE_RIP();
17934 IEM_MC_END();
17935 }
17936 else
17937 {
17938 /* memory access. */
17939 IEM_MC_BEGIN(3, 2);
17940 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17941 IEM_MC_ARG(uint8_t, u8Src, 1);
17942 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17943 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17944
17945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17946 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17947 IEM_MC_ASSIGN(u8Src, u8Imm);
17948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17949 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17950 IEM_MC_FETCH_EFLAGS(EFlags);
17951 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17952
17953 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17954 IEM_MC_COMMIT_EFLAGS(EFlags);
17955 IEM_MC_ADVANCE_RIP();
17956 IEM_MC_END();
17957 }
17958 return VINF_SUCCESS;
17959}
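
/*
 * The trailing '1' passed to IEM_MC_CALC_RM_EFF_ADDR above is the number of
 * immediate bytes still to be fetched after the ModRM/displacement.  It
 * matters for RIP-relative addressing in 64-bit mode, where the displacement
 * is relative to the end of the whole instruction, immediate included:
 */
#if 0 /* illustrative sketch */
static uint64_t iemSketch_RipRelEffAddr(uint64_t uRipAfterDisp, uint8_t cbImm, int32_t i32Disp)
{
    /* uRipAfterDisp points just past the displacement; adding the immediate
       size yields the RIP of the next instruction, the disp32 base. */
    return uRipAfterDisp + cbImm + (int64_t)i32Disp;
}
#endif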
17960
17961
17962/** Opcode 0xf7 /0. */
17963FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
17964{
17965 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
17966 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17967
17968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17969 {
17970 /* register access */
17971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17972 switch (pVCpu->iem.s.enmEffOpSize)
17973 {
17974 case IEMMODE_16BIT:
17975 {
17976 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17977 IEM_MC_BEGIN(3, 0);
17978 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17979 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
17980 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17981 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17982 IEM_MC_REF_EFLAGS(pEFlags);
17983 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17984 IEM_MC_ADVANCE_RIP();
17985 IEM_MC_END();
17986 return VINF_SUCCESS;
17987 }
17988
17989 case IEMMODE_32BIT:
17990 {
17991 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17992 IEM_MC_BEGIN(3, 0);
17993 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17994 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
17995 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17996 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17997 IEM_MC_REF_EFLAGS(pEFlags);
17998 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17999 /* No clearing the high dword here - test doesn't write back the result. */
18000 IEM_MC_ADVANCE_RIP();
18001 IEM_MC_END();
18002 return VINF_SUCCESS;
18003 }
18004
18005 case IEMMODE_64BIT:
18006 {
18007 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18008 IEM_MC_BEGIN(3, 0);
18009 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18010 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
18011 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18012 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18013 IEM_MC_REF_EFLAGS(pEFlags);
18014 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18015 IEM_MC_ADVANCE_RIP();
18016 IEM_MC_END();
18017 return VINF_SUCCESS;
18018 }
18019
18020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18021 }
18022 }
18023 else
18024 {
18025 /* memory access. */
18026 switch (pVCpu->iem.s.enmEffOpSize)
18027 {
18028 case IEMMODE_16BIT:
18029 {
18030 IEM_MC_BEGIN(3, 2);
18031 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
18032 IEM_MC_ARG(uint16_t, u16Src, 1);
18033 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18034 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18035
18036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
18037 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
18038 IEM_MC_ASSIGN(u16Src, u16Imm);
18039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18040 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18041 IEM_MC_FETCH_EFLAGS(EFlags);
18042 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
18043
18044 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
18045 IEM_MC_COMMIT_EFLAGS(EFlags);
18046 IEM_MC_ADVANCE_RIP();
18047 IEM_MC_END();
18048 return VINF_SUCCESS;
18049 }
18050
18051 case IEMMODE_32BIT:
18052 {
18053 IEM_MC_BEGIN(3, 2);
18054 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
18055 IEM_MC_ARG(uint32_t, u32Src, 1);
18056 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18058
18059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18060 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
18061 IEM_MC_ASSIGN(u32Src, u32Imm);
18062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18063 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18064 IEM_MC_FETCH_EFLAGS(EFlags);
18065 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
18066
18067 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
18068 IEM_MC_COMMIT_EFLAGS(EFlags);
18069 IEM_MC_ADVANCE_RIP();
18070 IEM_MC_END();
18071 return VINF_SUCCESS;
18072 }
18073
18074 case IEMMODE_64BIT:
18075 {
18076 IEM_MC_BEGIN(3, 2);
18077 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
18078 IEM_MC_ARG(uint64_t, u64Src, 1);
18079 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
18080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18081
18082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
18083 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
18084 IEM_MC_ASSIGN(u64Src, u64Imm);
18085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18086 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
18087 IEM_MC_FETCH_EFLAGS(EFlags);
18088 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
18089
18090 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
18091 IEM_MC_COMMIT_EFLAGS(EFlags);
18092 IEM_MC_ADVANCE_RIP();
18093 IEM_MC_END();
18094 return VINF_SUCCESS;
18095 }
18096
18097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18098 }
18099 }
18100}
18101
18102
18103/** Opcode 0xf6 /4, /5, /6 and /7. */
18104FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
18105{
18106 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18107 {
18108 /* register access */
18109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18110 IEM_MC_BEGIN(3, 1);
18111 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18112 IEM_MC_ARG(uint8_t, u8Value, 1);
18113 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18114 IEM_MC_LOCAL(int32_t, rc);
18115
18116 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18117 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18118 IEM_MC_REF_EFLAGS(pEFlags);
18119 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18120 IEM_MC_IF_LOCAL_IS_Z(rc) {
18121 IEM_MC_ADVANCE_RIP();
18122 } IEM_MC_ELSE() {
18123 IEM_MC_RAISE_DIVIDE_ERROR();
18124 } IEM_MC_ENDIF();
18125
18126 IEM_MC_END();
18127 }
18128 else
18129 {
18130 /* memory access. */
18131 IEM_MC_BEGIN(3, 2);
18132 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18133 IEM_MC_ARG(uint8_t, u8Value, 1);
18134 IEM_MC_ARG(uint32_t *, pEFlags, 2);
18135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18136 IEM_MC_LOCAL(int32_t, rc);
18137
18138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18140 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18141 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18142 IEM_MC_REF_EFLAGS(pEFlags);
18143 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
18144 IEM_MC_IF_LOCAL_IS_Z(rc) {
18145 IEM_MC_ADVANCE_RIP();
18146 } IEM_MC_ELSE() {
18147 IEM_MC_RAISE_DIVIDE_ERROR();
18148 } IEM_MC_ENDIF();
18149
18150 IEM_MC_END();
18151 }
18152 return VINF_SUCCESS;
18153}
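
/*
 * The mul/div workers return a non-zero status when the instruction must
 * raise #DE (divide by zero or a quotient that does not fit the destination),
 * which the IEM_MC_IF_LOCAL_IS_Z blocks above turn into
 * IEM_MC_RAISE_DIVIDE_ERROR.  For the 8-bit DIV this is roughly:
 */
#if 0 /* illustrative sketch */
static int iemSketch_DivU8(uint16_t *pu16AX, uint8_t u8Divisor)
{
    if (!u8Divisor)
        return -1;                                    /* #DE: divide by zero */
    uint16_t const uQuotient  = *pu16AX / u8Divisor;
    uint16_t const uRemainder = *pu16AX % u8Divisor;
    if (uQuotient > 0xff)
        return -1;                                    /* #DE: quotient overflow */
    *pu16AX = (uint8_t)uQuotient | (uRemainder << 8); /* AL = quotient, AH = remainder */
    return 0;
}
#endif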
18154
18155
18156/** Opcode 0xf7 /4, /5, /6 and /7. */
18157FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18158{
18159 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18160
18161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18162 {
18163 /* register access */
18164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18165 switch (pVCpu->iem.s.enmEffOpSize)
18166 {
18167 case IEMMODE_16BIT:
18168 {
18170 IEM_MC_BEGIN(4, 1);
18171 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18172 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18173 IEM_MC_ARG(uint16_t, u16Value, 2);
18174 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18175 IEM_MC_LOCAL(int32_t, rc);
18176
18177 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18178 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18179 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18180 IEM_MC_REF_EFLAGS(pEFlags);
18181 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18182 IEM_MC_IF_LOCAL_IS_Z(rc) {
18183 IEM_MC_ADVANCE_RIP();
18184 } IEM_MC_ELSE() {
18185 IEM_MC_RAISE_DIVIDE_ERROR();
18186 } IEM_MC_ENDIF();
18187
18188 IEM_MC_END();
18189 return VINF_SUCCESS;
18190 }
18191
18192 case IEMMODE_32BIT:
18193 {
18195 IEM_MC_BEGIN(4, 1);
18196 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18197 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18198 IEM_MC_ARG(uint32_t, u32Value, 2);
18199 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18200 IEM_MC_LOCAL(int32_t, rc);
18201
18202 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18203 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18204 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18205 IEM_MC_REF_EFLAGS(pEFlags);
18206 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18207 IEM_MC_IF_LOCAL_IS_Z(rc) {
18208 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18209 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18210 IEM_MC_ADVANCE_RIP();
18211 } IEM_MC_ELSE() {
18212 IEM_MC_RAISE_DIVIDE_ERROR();
18213 } IEM_MC_ENDIF();
18214
18215 IEM_MC_END();
18216 return VINF_SUCCESS;
18217 }
18218
18219 case IEMMODE_64BIT:
18220 {
18222 IEM_MC_BEGIN(4, 1);
18223 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18224 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18225 IEM_MC_ARG(uint64_t, u64Value, 2);
18226 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18227 IEM_MC_LOCAL(int32_t, rc);
18228
18229 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18230 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18231 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18232 IEM_MC_REF_EFLAGS(pEFlags);
18233 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18234 IEM_MC_IF_LOCAL_IS_Z(rc) {
18235 IEM_MC_ADVANCE_RIP();
18236 } IEM_MC_ELSE() {
18237 IEM_MC_RAISE_DIVIDE_ERROR();
18238 } IEM_MC_ENDIF();
18239
18240 IEM_MC_END();
18241 return VINF_SUCCESS;
18242 }
18243
18244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18245 }
18246 }
18247 else
18248 {
18249 /* memory access. */
18250 switch (pVCpu->iem.s.enmEffOpSize)
18251 {
18252 case IEMMODE_16BIT:
18253 {
18254 IEM_MC_BEGIN(4, 2);
18255 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18256 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18257 IEM_MC_ARG(uint16_t, u16Value, 2);
18258 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18260 IEM_MC_LOCAL(int32_t, rc);
18261
18262 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18264 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18265 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18266 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18267 IEM_MC_REF_EFLAGS(pEFlags);
18268 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18269 IEM_MC_IF_LOCAL_IS_Z(rc) {
18270 IEM_MC_ADVANCE_RIP();
18271 } IEM_MC_ELSE() {
18272 IEM_MC_RAISE_DIVIDE_ERROR();
18273 } IEM_MC_ENDIF();
18274
18275 IEM_MC_END();
18276 return VINF_SUCCESS;
18277 }
18278
18279 case IEMMODE_32BIT:
18280 {
18281 IEM_MC_BEGIN(4, 2);
18282 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18283 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18284 IEM_MC_ARG(uint32_t, u32Value, 2);
18285 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18287 IEM_MC_LOCAL(int32_t, rc);
18288
18289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18291 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18292 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18293 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18294 IEM_MC_REF_EFLAGS(pEFlags);
18295 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18296 IEM_MC_IF_LOCAL_IS_Z(rc) {
18297 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18298 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18299 IEM_MC_ADVANCE_RIP();
18300 } IEM_MC_ELSE() {
18301 IEM_MC_RAISE_DIVIDE_ERROR();
18302 } IEM_MC_ENDIF();
18303
18304 IEM_MC_END();
18305 return VINF_SUCCESS;
18306 }
18307
18308 case IEMMODE_64BIT:
18309 {
18310 IEM_MC_BEGIN(4, 2);
18311 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18312 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18313 IEM_MC_ARG(uint64_t, u64Value, 2);
18314 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18316 IEM_MC_LOCAL(int32_t, rc);
18317
18318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18320 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18321 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18322 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18323 IEM_MC_REF_EFLAGS(pEFlags);
18324 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18325 IEM_MC_IF_LOCAL_IS_Z(rc) {
18326 IEM_MC_ADVANCE_RIP();
18327 } IEM_MC_ELSE() {
18328 IEM_MC_RAISE_DIVIDE_ERROR();
18329 } IEM_MC_ENDIF();
18330
18331 IEM_MC_END();
18332 return VINF_SUCCESS;
18333 }
18334
18335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18336 }
18337 }
18338}
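
/*
 * For the Ev forms the implicit operands are the rDX:rAX pair.  Note that
 * only the 32-bit variants need the explicit IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF
 * calls: a 32-bit GPR write zero-extends to 64 bits architecturally, but the
 * workers only store 32 bits through the references, while the 16-bit forms
 * leave the upper register bits untouched just like real hardware.
 */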
18339
18340/** Opcode 0xf6. */
18341FNIEMOP_DEF(iemOp_Grp3_Eb)
18342{
18343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18344 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18345 {
18346 case 0:
18347 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
18348 case 1:
18349/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18350 return IEMOP_RAISE_INVALID_OPCODE();
18351 case 2:
18352 IEMOP_MNEMONIC(not_Eb, "not Eb");
18353 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
18354 case 3:
18355 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
18356 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
18357 case 4:
18358 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
18359 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18360 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
18361 case 5:
18362 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
18363 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18364 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
18365 case 6:
18366 IEMOP_MNEMONIC(div_Eb, "div Eb");
18367 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18368 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
18369 case 7:
18370 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
18371 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18372 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
18373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18374 }
18375}
18376
18377
18378/** Opcode 0xf7. */
18379FNIEMOP_DEF(iemOp_Grp3_Ev)
18380{
18381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18382 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18383 {
18384 case 0:
18385 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
18386 case 1:
18387/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
18388 return IEMOP_RAISE_INVALID_OPCODE();
18389 case 2:
18390 IEMOP_MNEMONIC(not_Ev, "not Ev");
18391 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
18392 case 3:
18393 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
18394 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
18395 case 4:
18396 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
18397 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18398 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
18399 case 5:
18400 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
18401 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18402 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
18403 case 6:
18404 IEMOP_MNEMONIC(div_Ev, "div Ev");
18405 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18406 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
18407 case 7:
18408 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
18409 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
18410 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
18411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18412 }
18413}
18414
18415
18416/** Opcode 0xf8. */
18417FNIEMOP_DEF(iemOp_clc)
18418{
18419 IEMOP_MNEMONIC(clc, "clc");
18420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18421 IEM_MC_BEGIN(0, 0);
18422 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
18423 IEM_MC_ADVANCE_RIP();
18424 IEM_MC_END();
18425 return VINF_SUCCESS;
18426}
18427
18428
18429/** Opcode 0xf9. */
18430FNIEMOP_DEF(iemOp_stc)
18431{
18432 IEMOP_MNEMONIC(stc, "stc");
18433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18434 IEM_MC_BEGIN(0, 0);
18435 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
18436 IEM_MC_ADVANCE_RIP();
18437 IEM_MC_END();
18438 return VINF_SUCCESS;
18439}
18440
18441
18442/** Opcode 0xfa. */
18443FNIEMOP_DEF(iemOp_cli)
18444{
18445 IEMOP_MNEMONIC(cli, "cli");
18446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18447 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
18448}
18449
18450
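/** Opcode 0xfb. */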
18451FNIEMOP_DEF(iemOp_sti)
18452{
18453 IEMOP_MNEMONIC(sti, "sti");
18454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18455 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
18456}
18457
18458
18459/** Opcode 0xfc. */
18460FNIEMOP_DEF(iemOp_cld)
18461{
18462 IEMOP_MNEMONIC(cld, "cld");
18463 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18464 IEM_MC_BEGIN(0, 0);
18465 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
18466 IEM_MC_ADVANCE_RIP();
18467 IEM_MC_END();
18468 return VINF_SUCCESS;
18469}
18470
18471
18472/** Opcode 0xfd. */
18473FNIEMOP_DEF(iemOp_std)
18474{
18475 IEMOP_MNEMONIC(std, "std");
18476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18477 IEM_MC_BEGIN(0, 0);
18478 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
18479 IEM_MC_ADVANCE_RIP();
18480 IEM_MC_END();
18481 return VINF_SUCCESS;
18482}
18483
18484
18485/** Opcode 0xfe. */
18486FNIEMOP_DEF(iemOp_Grp4)
18487{
18488 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18489 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18490 {
18491 case 0:
18492 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
18493 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
18494 case 1:
18495 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
18496 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
18497 default:
18498 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
18499 return IEMOP_RAISE_INVALID_OPCODE();
18500 }
18501}
18502
18503
18504/**
18505 * Opcode 0xff /2.
18506 * @param bRm The RM byte.
18507 */
18508FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
18509{
18510 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
18511 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18512
18513 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18514 {
18515 /* The new RIP is taken from a register. */
18516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18517 switch (pVCpu->iem.s.enmEffOpSize)
18518 {
18519 case IEMMODE_16BIT:
18520 IEM_MC_BEGIN(1, 0);
18521 IEM_MC_ARG(uint16_t, u16Target, 0);
18522 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18523 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18524 IEM_MC_END();
18525 return VINF_SUCCESS;
18526
18527 case IEMMODE_32BIT:
18528 IEM_MC_BEGIN(1, 0);
18529 IEM_MC_ARG(uint32_t, u32Target, 0);
18530 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18531 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18532 IEM_MC_END();
18533 return VINF_SUCCESS;
18534
18535 case IEMMODE_64BIT:
18536 IEM_MC_BEGIN(1, 0);
18537 IEM_MC_ARG(uint64_t, u64Target, 0);
18538 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18539 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18540 IEM_MC_END();
18541 return VINF_SUCCESS;
18542
18543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18544 }
18545 }
18546 else
18547 {
18548 /* The new RIP is taken from a memory operand. */
18549 switch (pVCpu->iem.s.enmEffOpSize)
18550 {
18551 case IEMMODE_16BIT:
18552 IEM_MC_BEGIN(1, 1);
18553 IEM_MC_ARG(uint16_t, u16Target, 0);
18554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18557 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18558 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
18559 IEM_MC_END();
18560 return VINF_SUCCESS;
18561
18562 case IEMMODE_32BIT:
18563 IEM_MC_BEGIN(1, 1);
18564 IEM_MC_ARG(uint32_t, u32Target, 0);
18565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18568 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18569 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
18570 IEM_MC_END();
18571 return VINF_SUCCESS;
18572
18573 case IEMMODE_64BIT:
18574 IEM_MC_BEGIN(1, 1);
18575 IEM_MC_ARG(uint64_t, u64Target, 0);
18576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18579 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18580 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
18581 IEM_MC_END();
18582 return VINF_SUCCESS;
18583
18584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18585 }
18586 }
18587}
18588
18589typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18590
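/**
 * Common worker for the Grp5 far call/jump forms, 'callf Ep' and 'jmpf Ep'.
 *
 * The operand is a far pointer in memory: the offset comes first (2, 4 or 8
 * bytes according to the effective operand size) and is immediately followed
 * by the 16-bit selector, hence the IEM_MC_FETCH_MEM_U16_DISP calls below
 * with displacements of 2, 4 and 8.
 */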
/**
 * Common worker for far branches via memory: callf Ep (0xff /3) and
 * jmpf Ep (0xff /5).
 *
 * @param bRm The RM byte.
 * @param pfnCImpl The C implementation that performs the actual far branch.
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* A far pointer cannot come from a register, so mod=3 decodes to #UD. */
    if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /** @todo testcase: AMD does not seem to believe in this case (see bs-cpu-xcpt-1)
             *        and will apparently ignore REX.W, at least for the jmp far qword [rsp]
             *        and call far qword [rsp] encodings. */
            if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Sel, 0);
                IEM_MC_ARG(uint64_t, offSeg, 1);
                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
                IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
                IEM_MC_END();
                return VINF_SUCCESS;
            }
            /* AMD falls thru. */
            /* fall thru */

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

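/**
 * A minimal standalone sketch (not part of IEM) of the far-pointer layout
 * iemOpHlp_Grp5_far_Ep reads above: the offset comes first in guest memory
 * and the 16-bit selector follows it at displacement 2, 4 or 8 depending on
 * the operand size -- the same order as the IEM_MC_FETCH_MEM_* pairs.
 * A little-endian host is assumed, as on x86 itself.
 *
 * @code
 * #include <stdint.h>
 * #include <string.h>
 * #include <stdio.h>
 *
 * int main(void)
 * {
 *     uint8_t        abMem[6];                     // an m16:32 far pointer in "guest memory"
 *     uint32_t const offSeg = UINT32_C(0x00401000);
 *     uint16_t const uSel   = UINT16_C(0x0023);
 *     memcpy(&abMem[0], &offSeg, sizeof(offSeg));  // offset at displacement 0
 *     memcpy(&abMem[4], &uSel,   sizeof(uSel));    // selector at displacement 4
 *
 *     uint32_t offRead; memcpy(&offRead, &abMem[0], sizeof(offRead));  // cf. FETCH_MEM_U32
 *     uint16_t selRead; memcpy(&selRead, &abMem[4], sizeof(selRead));  // cf. FETCH_MEM_U16_DISP(.., 4)
 *     printf("callf %04x:%08x\n", selRead, offRead);
 *     return 0;
 * }
 * @endcode
 */
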

/**
 * Opcode 0xff /3.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}


/**
 * Opcode 0xff /4.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}

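/**
 * A minimal standalone sketch (not part of IEM) of what the operand-size
 * split in iemOp_Grp5_jmpn_Ev above amounts to: a 16-bit or 32-bit indirect
 * jump only replaces that many bits of RIP, zero-extending the rest (the
 * assumed x86 semantics behind IEM_MC_SET_RIP_U16/U32/U64).
 *
 * @code
 * #include <stdint.h>
 * #include <stdio.h>
 *
 * static uint64_t SetRip(uint64_t uTarget, unsigned cOpBits)
 * {
 *     if (cOpBits == 16)
 *         return uTarget & UINT64_C(0xffff);        // operand-size prefix form
 *     if (cOpBits == 32)
 *         return uTarget & UINT64_C(0xffffffff);    // 32-bit operand size
 *     return uTarget;                               // 64-bit mode default
 * }
 *
 * int main(void)
 * {
 *     uint64_t const uTarget = UINT64_C(0x12345678deadbeef);
 *     printf("16-bit: %#018llx\n", (unsigned long long)SetRip(uTarget, 16));
 *     printf("32-bit: %#018llx\n", (unsigned long long)SetRip(uTarget, 32));
 *     printf("64-bit: %#018llx\n", (unsigned long long)SetRip(uTarget, 64));
 *     return 0;
 * }
 * @endcode
 */
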

/**
 * Opcode 0xff /5.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}


/**
 * Opcode 0xff /6.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory operands are handled here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

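/**
 * A minimal standalone sketch (not part of IEM) of the push half of
 * iemOp_Grp5_push_Ev above: fetch the operand, lower the stack pointer by
 * the operand size, store the value.  A toy model of IEM_MC_PUSH_U64, not
 * the real segmented/paged stack access.
 *
 * @code
 * #include <stdint.h>
 * #include <string.h>
 * #include <stdio.h>
 *
 * static uint8_t  g_abStack[64];                   // toy stack backing store
 * static uint64_t g_uRsp = sizeof(g_abStack);      // toy stack pointer
 *
 * static void PushU64(uint64_t uValue)
 * {
 *     g_uRsp -= sizeof(uValue);                    // the stack grows down by the operand size
 *     memcpy(&g_abStack[g_uRsp], &uValue, sizeof(uValue));
 * }
 *
 * int main(void)
 * {
 *     PushU64(UINT64_C(0x1122334455667788));       // the value "fetched from memory"
 *     printf("rsp=%u, byte at top=%#x\n", (unsigned)g_uRsp, g_abStack[g_uRsp]);
 *     return 0;
 * }
 * @endcode
 */
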

/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}

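/**
 * A minimal standalone sketch (not part of IEM) of the group dispatch in
 * iemOp_Grp5 above: group opcodes such as 0xff reuse the reg field of the
 * ModRM byte (bits 5:3) as a sub-opcode.  Sample encodings: ff d0 is
 * 'call rax', ff 25 is 'jmp [mem]', ff 30 is 'push [rax]'.
 *
 * @code
 * #include <stdint.h>
 * #include <stdio.h>
 *
 * int main(void)
 * {
 *     static const char * const s_apszGrp5[8] =
 *     {
 *         "inc Ev", "dec Ev", "calln Ev", "callf Ep", "jmpn Ev", "jmpf Ep", "push Ev", "#UD"
 *     };
 *     static const uint8_t s_abModRm[] = { 0xd0, 0x25, 0x30 };
 *     for (unsigned i = 0; i < sizeof(s_abModRm); i++)
 *     {
 *         unsigned const iReg = (s_abModRm[i] >> 3) & 7;   // the /r sub-opcode
 *         printf("ff %02x -> /%u = %s\n", s_abModRm[i], iReg, s_apszGrp5[iReg]);
 *     }
 *     return 0;
 * }
 * @endcode
 */
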


const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};

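/**
 * A minimal standalone sketch (not part of IEM) of the decode loop a table
 * like g_apfnOneByteMap enables: one indexed call per opcode byte.  The
 * handler names and the simplified signature are hypothetical; the real
 * FNIEMOP callbacks carry the whole per-VCPU decoder state.
 *
 * @code
 * #include <stdint.h>
 * #include <stdio.h>
 *
 * typedef int (*PFNOP)(void);                      // simplified handler type
 *
 * static int OpNop(void) { printf("nop\n"); return 0; }
 * static int OpHlt(void) { printf("hlt\n"); return 1; }
 * static int OpUd(void)  { printf("#UD\n"); return -1; }
 *
 * int main(void)
 * {
 *     PFNOP apfnMap[256];
 *     for (unsigned i = 0; i < 256; i++)           // default every slot to #UD
 *         apfnMap[i] = OpUd;
 *     apfnMap[0x90] = OpNop;                       // 0x90 = nop
 *     apfnMap[0xf4] = OpHlt;                       // 0xf4 = hlt
 *
 *     static const uint8_t s_abCode[] = { 0x90, 0x90, 0xf4 };
 *     for (unsigned off = 0; off < sizeof(s_abCode); off++)
 *         if (apfnMap[s_abCode[off]]() != 0)       // dispatch on the opcode byte
 *             break;                               // stop on hlt or #UD
 *     return 0;
 * }
 * @endcode
 */
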

/** @} */

#ifdef _MSC_VER
# pragma warning(pop)
#endif