VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 65612

Last change on this file since 65612 was 65610, checked in by vboxsync, 8 years ago

IEM: 0x0f 0x60, 0x0f 0x61, 0x0f 0x62, and 0x0f 0x6c split up.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 664.3 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 65610 2017-02-03 20:50:03Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** One-byte opcode dispatch table (defined elsewhere in this compilation
 *  unit); declared extern here purely so it can be referenced before its
 *  definition. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
#endif
29
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte itself; the register form goes straight to the
 * normal arithmetic helper, the memory form maps the destination, honours a
 * LOCK prefix (when a locked helper exists), and commits memory before the
 * flags.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the direct path).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* A NULL pfnLockedU8 identifies CMP/TEST style operations that never
           write the destination: read-only mapping and no LOCK allowed. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t,    u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* With a LOCK prefix, dispatch to the atomic variant of the helper. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
92
93
94/**
95 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
96 * memory/register as the destination.
97 *
98 * @param pImpl Pointer to the instruction implementation (assembly).
99 */
100FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
101{
102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
103
104 /*
105 * If rm is denoting a register, no more instruction bytes.
106 */
107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
108 {
109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
110
111 switch (pVCpu->iem.s.enmEffOpSize)
112 {
113 case IEMMODE_16BIT:
114 IEM_MC_BEGIN(3, 0);
115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
116 IEM_MC_ARG(uint16_t, u16Src, 1);
117 IEM_MC_ARG(uint32_t *, pEFlags, 2);
118
119 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
120 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
121 IEM_MC_REF_EFLAGS(pEFlags);
122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
123
124 IEM_MC_ADVANCE_RIP();
125 IEM_MC_END();
126 break;
127
128 case IEMMODE_32BIT:
129 IEM_MC_BEGIN(3, 0);
130 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
131 IEM_MC_ARG(uint32_t, u32Src, 1);
132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
133
134 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
135 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
136 IEM_MC_REF_EFLAGS(pEFlags);
137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
138
139 if (pImpl != &g_iemAImpl_test)
140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
141 IEM_MC_ADVANCE_RIP();
142 IEM_MC_END();
143 break;
144
145 case IEMMODE_64BIT:
146 IEM_MC_BEGIN(3, 0);
147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
148 IEM_MC_ARG(uint64_t, u64Src, 1);
149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
150
151 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
152 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
153 IEM_MC_REF_EFLAGS(pEFlags);
154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
155
156 IEM_MC_ADVANCE_RIP();
157 IEM_MC_END();
158 break;
159 }
160 }
161 else
162 {
163 /*
164 * We're accessing memory.
165 * Note! We're putting the eflags on the stack here so we can commit them
166 * after the memory.
167 */
168 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
169 switch (pVCpu->iem.s.enmEffOpSize)
170 {
171 case IEMMODE_16BIT:
172 IEM_MC_BEGIN(3, 2);
173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
174 IEM_MC_ARG(uint16_t, u16Src, 1);
175 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
177
178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
179 if (!pImpl->pfnLockedU16)
180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
181 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
182 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
183 IEM_MC_FETCH_EFLAGS(EFlags);
184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
186 else
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
188
189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
190 IEM_MC_COMMIT_EFLAGS(EFlags);
191 IEM_MC_ADVANCE_RIP();
192 IEM_MC_END();
193 break;
194
195 case IEMMODE_32BIT:
196 IEM_MC_BEGIN(3, 2);
197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
198 IEM_MC_ARG(uint32_t, u32Src, 1);
199 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
201
202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
203 if (!pImpl->pfnLockedU32)
204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
205 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
206 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
207 IEM_MC_FETCH_EFLAGS(EFlags);
208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
210 else
211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
212
213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
214 IEM_MC_COMMIT_EFLAGS(EFlags);
215 IEM_MC_ADVANCE_RIP();
216 IEM_MC_END();
217 break;
218
219 case IEMMODE_64BIT:
220 IEM_MC_BEGIN(3, 2);
221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
222 IEM_MC_ARG(uint64_t, u64Src, 1);
223 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
225
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
227 if (!pImpl->pfnLockedU64)
228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
229 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
231 IEM_MC_FETCH_EFLAGS(EFlags);
232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
233 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
234 else
235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
236
237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
238 IEM_MC_COMMIT_EFLAGS(EFlags);
239 IEM_MC_ADVANCE_RIP();
240 IEM_MC_END();
241 break;
242 }
243 }
244 return VINF_SUCCESS;
245}
246
247
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * The source (r/m) is only ever read, so the memory form needs no mapping,
 * no LOCK handling, and no deferred flags commit.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the direct path).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
300
301
302/**
303 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
304 * register as the destination.
305 *
306 * @param pImpl Pointer to the instruction implementation (assembly).
307 */
308FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
309{
310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
311
312 /*
313 * If rm is denoting a register, no more instruction bytes.
314 */
315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
316 {
317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
318 switch (pVCpu->iem.s.enmEffOpSize)
319 {
320 case IEMMODE_16BIT:
321 IEM_MC_BEGIN(3, 0);
322 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
323 IEM_MC_ARG(uint16_t, u16Src, 1);
324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
325
326 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
327 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
328 IEM_MC_REF_EFLAGS(pEFlags);
329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
330
331 IEM_MC_ADVANCE_RIP();
332 IEM_MC_END();
333 break;
334
335 case IEMMODE_32BIT:
336 IEM_MC_BEGIN(3, 0);
337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
338 IEM_MC_ARG(uint32_t, u32Src, 1);
339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
340
341 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
342 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
343 IEM_MC_REF_EFLAGS(pEFlags);
344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
345
346 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
347 IEM_MC_ADVANCE_RIP();
348 IEM_MC_END();
349 break;
350
351 case IEMMODE_64BIT:
352 IEM_MC_BEGIN(3, 0);
353 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
354 IEM_MC_ARG(uint64_t, u64Src, 1);
355 IEM_MC_ARG(uint32_t *, pEFlags, 2);
356
357 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
358 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
359 IEM_MC_REF_EFLAGS(pEFlags);
360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
361
362 IEM_MC_ADVANCE_RIP();
363 IEM_MC_END();
364 break;
365 }
366 }
367 else
368 {
369 /*
370 * We're accessing memory.
371 */
372 switch (pVCpu->iem.s.enmEffOpSize)
373 {
374 case IEMMODE_16BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
377 IEM_MC_ARG(uint16_t, u16Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
383 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
384 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
385 IEM_MC_REF_EFLAGS(pEFlags);
386 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
387
388 IEM_MC_ADVANCE_RIP();
389 IEM_MC_END();
390 break;
391
392 case IEMMODE_32BIT:
393 IEM_MC_BEGIN(3, 1);
394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
395 IEM_MC_ARG(uint32_t, u32Src, 1);
396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
398
399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
401 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
402 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
403 IEM_MC_REF_EFLAGS(pEFlags);
404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
405
406 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 break;
410
411 case IEMMODE_64BIT:
412 IEM_MC_BEGIN(3, 1);
413 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
414 IEM_MC_ARG(uint64_t, u64Src, 1);
415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
417
418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
420 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
421 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
422 IEM_MC_REF_EFLAGS(pEFlags);
423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
424
425 IEM_MC_ADVANCE_RIP();
426 IEM_MC_END();
427 break;
428 }
429 }
430 return VINF_SUCCESS;
431}
432
433
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the direct path).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
458
459
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code.
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write EAX, so don't clear the upper half of RAX. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz in 64-bit mode: a 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
532
533
/** Opcodes 0xf1, 0xd6. Raises \#UD unconditionally. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
540
541
/** Invalid with Mod R/M byte: the bRm parameter is ignored and \#UD raised. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}
549
550
/** Invalid opcode where intel requires Mod R/M sequence.
 * On Intel the full Mod R/M (and any displacement) is consumed before \#UD is
 * raised; other vendors fault without decoding further. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Consume any SIB/displacement bytes belonging to the Mod R/M byte. */
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
568
569
/** Invalid opcode where intel requires Mod R/M sequence and a byte
 * immediate (one byte is fetched below, despite what the old comment said). */
FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Consume any SIB/displacement bytes belonging to the Mod R/M byte. */
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
589
590
/** Invalid opcode where intel requires a 3rd escape byte and a Mod R/M
 * sequence. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Consume any SIB/displacement bytes belonging to the Mod R/M byte. */
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
610
611
/** Invalid opcode where intel requires a 3rd escape byte, Mod R/M sequence,
 * and a byte immediate (one byte is fetched below, despite what the old
 * comment said). */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Consume any SIB/displacement bytes belonging to the Mod R/M byte. */
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); RT_NOREF(bImm);
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
632
633
634
635/** @name ..... opcodes.
636 *
637 * @{
638 */
639
640/** @} */
641
642
643/** @name Two byte opcodes (first byte 0x0f).
644 *
645 * @{
646 */
647
/** Opcode 0x0f 0x00 /0 - sldt.
 * Register form stores LDTR at the effective operand size; the memory form is
 * always a 16-bit store regardless of operand size (see the else branch). */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
704
705
/** Opcode 0x0f 0x00 /1 - str.
 * Same structure as sldt above, but stores TR; the memory form is always a
 * 16-bit store regardless of operand size. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
762
763
/** Opcode 0x0f 0x00 /2 - lldt.
 * Defers the actual work (privilege checks, descriptor load) to
 * iemCImpl_lldt; hence no IEM_MC_ADVANCE_RIP here — the C implementation
 * completes the instruction. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
794
795
/** Opcode 0x0f 0x00 /3 - ltr.
 * Mirrors lldt above, deferring to iemCImpl_ltr.  NOTE(review): the register
 * branch uses the plain DONE_DECODING helper rather than the NL variant lldt
 * uses — presumably equivalent here; confirm against the decoder helpers. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
826
827
828/** Opcode 0x0f 0x00 /3. */
829FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
830{
831 IEMOP_HLP_MIN_286();
832 IEMOP_HLP_NO_REAL_OR_V86_MODE();
833
834 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
835 {
836 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
837 IEM_MC_BEGIN(2, 0);
838 IEM_MC_ARG(uint16_t, u16Sel, 0);
839 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
840 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
841 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
842 IEM_MC_END();
843 }
844 else
845 {
846 IEM_MC_BEGIN(2, 1);
847 IEM_MC_ARG(uint16_t, u16Sel, 0);
848 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
849 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
851 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
852 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
853 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
854 IEM_MC_END();
855 }
856 return VINF_SUCCESS;
857}
858
859
/** Opcode 0x0f 0x00 /4 - verr (readability check via the common VerX worker). */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
867
868
/** Opcode 0x0f 0x00 /5 - verw (writability check via the common VerX worker). */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}
876
877
/**
 * Group 6 jump table, indexed by the reg field of the Mod R/M byte.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,        /* /0 */
    iemOp_Grp6_str,         /* /1 */
    iemOp_Grp6_lldt,        /* /2 */
    iemOp_Grp6_ltr,         /* /3 */
    iemOp_Grp6_verr,        /* /4 */
    iemOp_Grp6_verw,        /* /5 */
    iemOp_InvalidWithRM,    /* /6 */
    iemOp_InvalidWithRM     /* /7 */
};
892
/** Opcode 0x0f 0x00 - dispatches group 6 on the Mod R/M reg field. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
}
899
900
/** Opcode 0x0f 0x01 /0 - sgdt (memory form); defers to iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
917
918
/** Opcode 0x0f 0x01 /0 (register form 0xc1 - vmcall). Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
925
926
/** Opcode 0x0f 0x01 /0 (register form 0xc2 - vmlaunch). Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
933
934
/** Opcode 0x0f 0x01 /0 (register form 0xc3 - vmresume). Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
941
942
/** Opcode 0x0f 0x01 /0 (register form 0xc4 - vmxoff). Unimplemented stub: raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
949
950
/** Opcode 0x0f 0x01 /1 - sidt (memory form); defers to iemCImpl_sidt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
967
968
/** Opcode 0x0f 0x01 /1 (register form 0xc8 - monitor); passes the effective
 *  segment so the C implementation can resolve DS:rAX. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}
976
977
/** Opcode 0x0f 0x01 /1 (register form 0xc9 - mwait). */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
985
986
/** Opcode 0x0f 0x01 /2 - lgdt; the effective operand size is forwarded so
 *  iemCImpl_lgdt can pick the 24/32/64-bit base width. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1003
1004
/** Opcode 0x0f 0x01 0xd0 - xgetbv; \#UD unless the guest CPU reports
 *  XSAVE/XRSTOR support. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1016
1017
/** Opcode 0x0f 0x01 0xd1: XSETBV - write an extended control register. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    /* #UD unless the guest CPU profile advertises XSAVE/XRSTOR. */
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1029
1030
/** Opcode 0x0f 0x01 /3 (memory form): LIDT - load IDTR from memory. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    /* In 64-bit mode the operand size is forced to 64-bit regardless of prefixes. */
    IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
                         ? IEMMODE_64BIT
                         : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1049
1050
/*
 * AMD SVM instructions (0x0f 0x01 0xd8..0xdf).  All stubbed to raise \#UD
 * via FNIEMOP_UD_STUB, i.e. SVM is not implemented by IEM at this point.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1074
1075/** Opcode 0x0f 0x01 /4. */
1076FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1077{
1078 IEMOP_MNEMONIC(smsw, "smsw");
1079 IEMOP_HLP_MIN_286();
1080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1081 {
1082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1083 switch (pVCpu->iem.s.enmEffOpSize)
1084 {
1085 case IEMMODE_16BIT:
1086 IEM_MC_BEGIN(0, 1);
1087 IEM_MC_LOCAL(uint16_t, u16Tmp);
1088 IEM_MC_FETCH_CR0_U16(u16Tmp);
1089 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1090 { /* likely */ }
1091 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1092 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1093 else
1094 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1095 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1096 IEM_MC_ADVANCE_RIP();
1097 IEM_MC_END();
1098 return VINF_SUCCESS;
1099
1100 case IEMMODE_32BIT:
1101 IEM_MC_BEGIN(0, 1);
1102 IEM_MC_LOCAL(uint32_t, u32Tmp);
1103 IEM_MC_FETCH_CR0_U32(u32Tmp);
1104 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1105 IEM_MC_ADVANCE_RIP();
1106 IEM_MC_END();
1107 return VINF_SUCCESS;
1108
1109 case IEMMODE_64BIT:
1110 IEM_MC_BEGIN(0, 1);
1111 IEM_MC_LOCAL(uint64_t, u64Tmp);
1112 IEM_MC_FETCH_CR0_U64(u64Tmp);
1113 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1114 IEM_MC_ADVANCE_RIP();
1115 IEM_MC_END();
1116 return VINF_SUCCESS;
1117
1118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1119 }
1120 }
1121 else
1122 {
1123 /* Ignore operand size here, memory refs are always 16-bit. */
1124 IEM_MC_BEGIN(0, 2);
1125 IEM_MC_LOCAL(uint16_t, u16Tmp);
1126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1129 IEM_MC_FETCH_CR0_U16(u16Tmp);
1130 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1131 { /* likely */ }
1132 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1133 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1134 else
1135 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1136 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1137 IEM_MC_ADVANCE_RIP();
1138 IEM_MC_END();
1139 return VINF_SUCCESS;
1140 }
1141}
1142
1143
/** Opcode 0x0f 0x01 /6: LMSW - load machine status word into CR0. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC(lmsw, "lmsw");
    IEMOP_HLP_MIN_286();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: 16-bit read regardless of operand size. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1173
1174
/** Opcode 0x0f 0x01 /7 (memory form): INVLPG - invalidate TLB entry for Mb. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC(invlpg, "invlpg");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    /* Only the effective address is computed; no memory access is performed. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1188
1189
/** Opcode 0x0f 0x01 0xf8 (mod=3, /7, rm=0): SWAPGS - 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC(swapgs, "swapgs");
    IEMOP_HLP_ONLY_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1198
1199
/** Opcode 0x0f 0x01 0xf9 (mod=3, /7, rm=1): RDTSCP - not yet implemented. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* Stub: logs a complaint and bails out so the caller can fall back. */
    NOREF(pVCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1207
1208
/**
 * Opcode 0x0f 0x01: Group 7 dispatcher.
 *
 * The /r reg field selects the instruction; for several encodings the
 * mod=3 forms are distinct instructions selected by the rm field
 * (MONITOR/MWAIT, XGETBV/XSETBV, SVM group, SWAPGS/RDTSCP).
 */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: /* SGDT (mem) or VMX instructions (mod=3). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1: /* SIDT (mem) or MONITOR/MWAIT (mod=3). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2: /* LGDT (mem) or XGETBV/XSETBV (mod=3). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3: /* LIDT (mem) or the AMD SVM group (mod=3, all rm values used). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4: /* SMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6: /* LMSW - both register and memory forms. */
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7: /* INVLPG (mem) or SWAPGS/RDTSCP (mod=3). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1285
/**
 * Common worker for LAR and LSL (Gv, Ew).
 *
 * @param   fIsLar  true for LAR (load access rights), false for LSL
 *                  (load segment limit).
 *
 * Invalid outside protected mode.  The 32-bit and 64-bit operand sizes
 * share the 64-bit worker (iemCImpl_LarLsl_u64).
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: the selector comes from a general register. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: read the 16-bit selector from memory. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1379
1380
1381
1382/** Opcode 0x0f 0x02. */
1383FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1384{
1385 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1386 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1387}
1388
1389
/** Opcode 0x0f 0x03: LSL Gv,Ew - delegates to the common LAR/LSL worker. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1396
1397
/** Opcode 0x0f 0x05: SYSCALL. */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1405
1406
/** Opcode 0x0f 0x06: CLTS - clear CR0.TS. */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC(clts, "clts");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1414
1415
/** Opcode 0x0f 0x07: SYSRET. */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1423
1424
/** Opcode 0x0f 0x08: INVD - not implemented yet (FNIEMOP_STUB). */
FNIEMOP_STUB(iemOp_invd);
// IEMOP_HLP_MIN_486();
1428
1429
/** Opcode 0x0f 0x09: WBINVD - privilege-checked, otherwise treated as a NOP. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC(wbinvd, "wbinvd");
    IEMOP_HLP_MIN_486();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    /* Only the CPL check is emulated; no cache flushing is performed. */
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1442
1443
/** Opcode 0x0f 0x0b: UD2 - guaranteed invalid opcode. */
FNIEMOP_DEF(iemOp_ud2)
{
    IEMOP_MNEMONIC(ud2, "ud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1450
/** Opcode 0x0f 0x0d: AMD 3DNow! prefetch group (GrpP).
 *  Emulated as NOP Ev like Intel does; \#UD without the 3DNowPrefetch
 *  feature or with a register operand. */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms are invalid - prefetches only take memory operands. */
        IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
        case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
        case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Currently a NOP. */
    NOREF(GCPtrEffSrc);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1491
1492
/** Opcode 0x0f 0x0e: FEMMS - not implemented yet (FNIEMOP_STUB). */
FNIEMOP_STUB(iemOp_femms);


/*
 * 3DNow! instruction workers (0x0f 0x0f /imm8) - all unimplemented stubs,
 * dispatched by iemOp_3Dnow below based on the trailing immediate byte.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1568
1569
/** Opcode 0x0f 0x0f: 3DNow! dispatcher.
 *  The actual operation is selected by an imm8 following the modrm byte;
 *  \#UD when the guest CPU profile lacks 3DNow!. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
    {
        IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1611
1612
/* Opcode 0x0f 0x10 and its prefixed variants - all unimplemented stubs. */

/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
1621
1622
/** Opcode 0x0f 0x11 - vmovups Wps, Vps (store 128-bit XMM to reg/mem). */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is rm, source is reg (store direction). */
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  Unaligned stores are allowed (movups).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1664
1665
/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd (unimplemented stub). */
FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);

/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss (unimplemented stub). */
FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
1671
/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd (store low 64 bits of XMM). */
FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
{
    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register: only the low qword of the destination changes.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register: 64-bit store, no alignment requirement.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1716
1717
/* Opcodes 0x0f 0x12/0x13 and prefixed variants - unimplemented stubs. */

/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); //NEXT

/** Opcode 0x66 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); //NEXT

/** Opcode 0xf3 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); //NEXT

/** Opcode 0xf2 0x0f 0x12. */
FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); //NEXT

/** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
1732
/** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq (store low qword to memory).
 *  The register form is invalid (\#UD); a disabled implementation is kept
 *  in the \#if 0 block for reference. */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
#if 0
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
#else
        return IEMOP_RAISE_INVALID_OPCODE();
#endif
    }
    else
    {
        /*
         * Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1779
/* Opcode 0xf3 0x0f 0x13 - invalid */
/* Opcode 0xf2 0x0f 0x13 - invalid */

/* Opcodes 0x0f 0x14..0x17 (unpck/movhps family) - unimplemented stubs. */

/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x14 - invalid */
/* Opcode 0xf2 0x0f 0x14 - invalid */
/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x15 - invalid */
/* Opcode 0xf2 0x0f 0x15 - invalid */
/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT
/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT
/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
/* Opcode 0xf2 0x0f 0x16 - invalid */
/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT
/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT
/* Opcode 0xf3 0x0f 0x17 - invalid */
/* Opcode 0xf2 0x0f 0x17 - invalid */
1808
1809
/** Opcode 0x0f 0x18: Group 16 - PREFETCHh hints, emulated as NOPs.
 *  Register forms are invalid opcodes. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1842
1843
/** Opcode 0x0f 0x19..0x1f: multi-byte NOP Ev.
 *  The modrm operand is decoded (may fault on the address calc) but
 *  never accessed. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_MNEMONIC(nop_Ev, "nop Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Currently a NOP. */
        NOREF(GCPtrEffSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1869
1870
/** Opcode 0x0f 0x20: MOV Rd,Cd - read a control register.
 *  Only CR0/2/3/4 (and CR8 via LOCK on CPUs with that quirk) are valid. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
}
1902
1903
/** Opcode 0x0f 0x21: MOV Rd,Dd - read a debug register.
 *  REX.R is invalid here (no DR8-DR15). */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1917
1918
/** Opcode 0x0f 0x22: MOV Cd,Rd - write a control register.
 *  Mirrors iemOp_mov_Rd_Cd above with reversed operand roles. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
    IEMOP_HLP_MIN_386();
    /* Operand size is fixed: 64-bit in long mode, 32-bit otherwise. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
    else
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1950
1951
/** Opcode 0x0f 0x23: MOV Dd,Rd - write a debug register.
 *  REX.R is invalid here (no DR8-DR15). */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
}
1965
1966
/** Opcode 0x0f 0x24: MOV Rd,Td - test registers; invalid on modern CPUs. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1975
1976
/** Opcode 0x0f 0x26: MOV Td,Rd - test registers; invalid on modern CPUs. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
    /** @todo works on 386 and 486. */
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1985
1986
/** Opcode 0x0f 0x28 - vmovaps Vps, Wps (aligned 128-bit load). */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Destination is reg, source is rm (load direction). */
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  movaps enforces 16-byte alignment (ALIGN_SSE fetch).
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2028
/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd
 * (Currently decodes the legacy SSE2 movapd form only - see the mnemonic.) */
FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
{
    IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        /* Plain 128-bit register copy: Vpd <- Upd. */
        IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                              (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.  The _ALIGN_SSE fetch enforces the 16-byte
         * alignment required by movapd.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2070
2071/* Opcode 0xf3 0x0f 0x28 - invalid */
2072/* Opcode 0xf2 0x0f 0x28 - invalid */
2073
/** Opcode 0x0f 0x29 - movaps Wps,Vps / (66h) movapd Wpd,Vpd.
 * Store form of 0x0f 0x28; the operand size prefix selects movapd. */
FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
    else
        IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.  Note the reversed operand order vs. 0x0f 0x28:
         * rm (with REX.B) is the destination, reg (with REX.R) the source.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_BEGIN(0, 0);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory, register.  uSrc holds the XMM register value being stored;
         * the aligned store raises \#GP on a misaligned address.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2124
2125
/* 0x0f 0x2a: integer -> floating point conversions; decode stubs, not implemented yet. */
/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT
/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
/** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
/** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
2134
2135
/** Opcode 0x0f 0x2b - movntps Mps,Vps / (66h) movntpd Mpd,Vpd.
 * The non-temporal hint is irrelevant for emulation; this is implemented as a
 * plain aligned 128-bit store.  Register destinations are invalid. */
FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
{
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
        IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
    else
        IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        else
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        /* NOTE(review): we only read the XMM register here; _FOR_READ would
           seem sufficient (cf. the 0x0f 0x29 store path) - verify. */
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    /* The register, register encoding is invalid. */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
2172
2173
/* 0x0f 0x2c thru 0x0f 0x2f: FP -> integer conversions and scalar compares;
   decode stubs, not implemented yet. */
/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps);
/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);

/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
/* Opcode 0xf3 0x0f 0x2e - invalid */
/* Opcode 0xf2 0x0f 0x2e - invalid */

/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
/* Opcode 0xf3 0x0f 0x2f - invalid */
/* Opcode 0xf2 0x0f 0x2f - invalid */
2205
/** Opcode 0x0f 0x30 - wrmsr (write ECX-selected MSR from EDX:EAX; deferred to
 *  the C implementation). */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC(wrmsr, "wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
2213
2214
/** Opcode 0x0f 0x31 - rdtsc (read time-stamp counter into EDX:EAX; deferred to
 *  the C implementation). */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC(rdtsc, "rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
2222
2223
/** Opcode 0x0f 0x32 - rdmsr (read ECX-selected MSR into EDX:EAX; deferred to
 *  the C implementation).  (The old comment said 0x0f 0x33, but RDMSR is
 *  encoded as 0x0f 0x32 per the SDM opcode map.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC(rdmsr, "rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
2231
2232
/** Opcode 0x0f 0x33 - rdpmc.  (The old comment said 0x34, a copy-paste of the
 *  sysenter line below; RDPMC is 0x0f 0x33 per the SDM opcode map.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34 - sysenter. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35 - sysexit. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37 - getsec. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38 - three-byte escape A4. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a - three-byte escape A5. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2245
2246
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * Notes on the emitted microcode:
 *   - In 32-bit operand size the high half of the 64-bit destination is
 *     cleared even when the condition is false (the IEM_MC_ELSE branch),
 *     matching the architectural writeback behaviour.
 *   - With a memory operand the load is performed before the condition is
 *     evaluated, so access faults are raised regardless of the condition.
 *
 * @param   a_Cnd       The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
    \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
2347
2348
2349
/** Opcode 0x0f 0x40 - cmovo Gv,Ev (move if OF=1). */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno Gv,Ev (move if OF=0). */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc/cmovb Gv,Ev (move if CF=1). */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc/cmovae Gv,Ev (move if CF=0). */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove/cmovz Gv,Ev (move if ZF=1). */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne/cmovnz Gv,Ev (move if ZF=0). */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe Gv,Ev (move if CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe/cmova Gv,Ev (move if CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs Gv,Ev (move if SF=1). */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns Gv,Ev (move if SF=0). */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp Gv,Ev (move if PF=1). */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp Gv,Ev (move if PF=0). */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl/cmovnge Gv,Ev (move if SF!=OF). */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl/cmovge Gv,Ev (move if SF==OF). */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle/cmovng Gv,Ev (move if ZF=1 or SF!=OF). */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle/cmovg Gv,Ev (move if ZF=0 and SF==OF). */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
2478
/* 0x0f 0x50 thru 0x0f 0x5f: SSE/SSE2 packed and scalar FP operations; all
   still decode stubs (not implemented yet). */

/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
/* Opcode 0xf3 0x0f 0x50 - invalid */
/* Opcode 0xf2 0x0f 0x50 - invalid */

/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
/* Opcode 0x66 0x0f 0x52 - invalid */
/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x52 - invalid */

/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
/* Opcode 0x66 0x0f 0x53 - invalid */
/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
/* Opcode 0xf2 0x0f 0x53 - invalid */

/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x54 - invalid */
/* Opcode 0xf2 0x0f 0x54 - invalid */

/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x55 - invalid */
/* Opcode 0xf2 0x0f 0x55 - invalid */

/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x56 - invalid */
/* Opcode 0xf2 0x0f 0x56 - invalid */

/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x57 - invalid */
/* Opcode 0xf2 0x0f 0x57 - invalid */

/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);

/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
/* Opcode 0xf2 0x0f 0x5b - invalid */

/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);

/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2607
/**
 * Common worker for instructions on the form:
 *      pxxxx xmm1, xmm2/mem64
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 64-bit access (with 128-bit alignment checking, see the
 * IEM_MC_FETCH_MEM_U64_ALIGN_U128 below) reading only the low qword.
 *
 * Exceptions type 4.
 *
 * NOTE(review): despite the "Mmx" name, this body is the SSE variant (XMM
 * registers, pfnU128, SSE2 exception checks).  The body of
 * iemOpCommonSse_LowLow_To_Full below is the MMX variant; the two appear to
 * have been swapped when these opcodes were split up.  Callers must route by
 * body, not by name — verify against the instruction set tables before
 * renaming anything.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint128_t *,          pDst, 0);
        IEM_MC_ARG(uint64_t const *,     pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint128_t *,                 pDst,       0);
        IEM_MC_LOCAL(uint64_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2663
2664
/**
 * Common worker for instructions on the form:
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the low half of a register, which in the memory case
 * means a 32-bit memory access reading only the low dword.  Instructions
 * without a 64-bit MMX implementation (pfnU64 == NULL) raise \#UD.
 *
 * Exceptions type 4.
 *
 * NOTE(review): despite the "Sse" name, this body is the MMX variant (MMX
 * registers, pfnU64, MMX exception checks).  The body of
 * iemOpCommonMmx_LowLow_To_Full above is the SSE variant; the two appear to
 * have been swapped when these opcodes were split up.  Callers must route by
 * body, not by name — verify against the instruction set tables before
 * renaming anything.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!pImpl->pfnU64)
        return IEMOP_RAISE_INVALID_OPCODE();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint64_t *,          pDst, 0);
        IEM_MC_ARG(uint32_t const *,    pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *,                  pDst,       0);
        IEM_MC_LOCAL(uint32_t,                  uSrc);
        IEM_MC_ARG_LOCAL_REF(uint32_t const *,  pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2724
2725
2726/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
2727FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
2728{
2729 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd");
2730 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2731}
2732
/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
    /* NOTE(review): correct despite the worker name - in this revision
       iemOpCommonMmx_LowLow_To_Full contains the SSE (XREG/pfnU128)
       implementation, which is what the 66h-prefixed form needs. */
    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2739
2740/* Opcode 0xf3 0x0f 0x60 - invalid */
2741
2742
2743/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
2744FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
2745{
2746 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2747 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2748}
2749
2750/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
2751FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2752{
2753 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
2754 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2755}
2756
2757/* Opcode 0xf3 0x0f 0x61 - invalid */
2758
2759
2760/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
2761FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
2762{
2763 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd");
2764 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq);
2765}
2766
2767/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
2768FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2769{
2770 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
2771 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2772}
2773
2774/* Opcode 0xf3 0x0f 0x62 - invalid */
2775
2776
2777
/* 0x0f 0x63 thru 0x0f 0x67: MMX/SSE2 pack and compare-greater operations;
   decode stubs, not implemented yet. */
/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x63 - invalid */

/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x64 - invalid */

/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x65 - invalid */

/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0x66 - invalid */

/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0x67 - invalid */
2807
2808
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *     pxxxx xmm1, xmm2/mem128
 *     pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * The encoding is selected by the mandatory prefix: 66h picks the SSE form,
 * no prefix picks the MMX form (which raises \#UD when the instruction has no
 * MMX implementation, i.e. pfnU64 == NULL), anything else is invalid.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *,          pDst, 0);
                IEM_MC_ARG(uint128_t const *,    pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *,                 pDst,       0);
                IEM_MC_LOCAL(uint128_t,                 uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *,          pDst, 0);
                IEM_MC_ARG(uint64_t const *,    pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,                  pDst,       0);
                IEM_MC_LOCAL(uint64_t,                  uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2919
2920
/** Opcode 0x0f 0x68 - punpckhbw Pq,Qq / (66h) punpckhbw Vdq,Wdq.
 * Both encodings are handled by the common HighHigh worker via its mandatory
 * prefix switch. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2927
2928
/** Opcode 0x0f 0x69 - punpckhwd Pq,Qd / (66h) punpckhwd Vdq,Wdq.
 * Both encodings are handled by the common HighHigh worker via its mandatory
 * prefix switch. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2935
2936
/** Opcode 0x0f 0x6a - punpckhdq Pq,Qd / (66h) punpckhdq Vdq,Wdq.
 * Both encodings are handled by the common HighHigh worker via its mandatory
 * prefix switch. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}

/** Opcode 0x0f 0x6b - packssdw Pq,Qd / (66h) packssdw Vdq,Wdq; decode stub,
 *  not implemented yet. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2946
2947
2948/* Opcode 0x0f 0x6c - invalid */
2949
/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx
 * Interleave low quadwords; defers to the common SSE low-half-to-full
 * worker with the punpcklqdq implementation. */
FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2956
2957/* Opcode 0xf3 0x0f 0x6c - invalid */
2958/* Opcode 0xf2 0x0f 0x6c - invalid */
2959
2960
/** Opcode 0x0f 0x6d.
 * PUNPCKHQDQ - interleave high quadwords.
 * NOTE(review): routed through the combined MMX/SSE high-half worker even
 * though the name only lists the SSE (Vdq,Wdq) form - confirm whether the
 * unprefixed 0x0f 0x6d encoding is meant to be accepted here. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2967
2968
/** Opcode 0x0f 0x6e.
 * MOVD/MOVQ - load an MMX or XMM register from a general register or memory.
 * The mandatory prefix picks the register file (66h = XMM, none = MMX) and
 * REX.W picks the 32-bit vs. 64-bit source width.  XMM stores zero-extend
 * the value to 128 bits (IEM_MC_STORE_XREG_*_ZX_U128). */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on the mandatory prefix; F2h/F3h fall to the invalid default. */
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
            else
                IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
            else
                IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3079
3080
/** Opcode 0x0f 0x6f.
 * Load direction: MOVQ Pq,Qq (no prefix, MMX), MOVDQA Vdq,Wdq (66h) and
 * MOVDQU Vdq,Wdq (F3h).  Only the aligned (movdqa) memory fetch enforces
 * 16-byte alignment. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true; /* fall thru - aligned and unaligned share the code below */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
                                      (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3177
3178
/** Opcode 0x0f 0x70. The immediate here is evil!
 * The shuffle-control byte follows the ModR/M operand, so in the memory forms
 * it must be fetched only after IEM_MC_CALC_RM_EFF_ADDR has consumed the
 * displacement bytes.  Prefix selects the variant: 66h=pshufd, F2h=pshuflw,
 * F3h=pshufhw, none=pshufw (MMX extension). */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE variants share the decode path; pick the worker. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* The immediate comes after the ModR/M bytes, hence the fetch order. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_SSE_USAGE();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                /* The immediate comes after the ModR/M bytes, hence the fetch order. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_PREPARE_FPU_USAGE();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3307
3308
/*
 * Group 12 (0x0f 0x71): immediate-count word shifts on the register (11b)
 * ModR/M form; dispatched from iemOp_Grp12.  All entries are decoder stubs.
 */

/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
3326
3327
/** Opcode 0x0f 0x71.
 * Group 12 dispatcher: only the register (11b) ModR/M form exists; /0, /1,
 * /3, /5 and /7 are undefined, and the mandatory prefix (none vs. 66h)
 * selects the MMX or SSE shift variant. */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no memory forms in this group */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3362
3363
/*
 * Group 13 (0x0f 0x72): immediate-count dword shifts on the register (11b)
 * ModR/M form; dispatched from iemOp_Grp13.  All entries are decoder stubs.
 */

/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3381
3382
/** Opcode 0x0f 0x72.
 * Group 13 dispatcher: register (11b) ModR/M form only; /0, /1, /3, /5 and
 * /7 are undefined, and the mandatory prefix (none vs. 66h) selects the MMX
 * or SSE shift variant. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no memory forms in this group */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3417
3418
/*
 * Group 14 (0x0f 0x73): immediate-count quadword/double-quadword shifts on
 * the register (11b) ModR/M form; dispatched from iemOp_Grp14.  The /3 and
 * /7 entries (psrldq/pslldq) only exist with the 66h prefix.  All stubs.
 */

/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3436
3437
/** Opcode 0x0f 0x73.
 * Group 14 dispatcher: register (11b) ModR/M form only; /0, /1, /4 and /5
 * are undefined.  For /3 and /7 only the 66h-prefixed (SSE) variant is
 * decoded - there is no MMX psrldq/pslldq. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no memory forms in this group */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default: return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3477
3478
3479/**
3480 * Common worker for SSE2 and MMX instructions on the forms:
3481 * pxxx mm1, mm2/mem64
3482 * pxxx xmm1, xmm2/mem128
3483 *
3484 * Proper alignment of the 128-bit operand is enforced.
3485 * Exceptions type 4. SSE2 and MMX cpuid checks.
3486 */
3487FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3488{
3489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3490 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3491 {
3492 case IEM_OP_PRF_SIZE_OP: /* SSE */
3493 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3494 {
3495 /*
3496 * Register, register.
3497 */
3498 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3499 IEM_MC_BEGIN(2, 0);
3500 IEM_MC_ARG(uint128_t *, pDst, 0);
3501 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3502 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3503 IEM_MC_PREPARE_SSE_USAGE();
3504 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3505 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3506 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3507 IEM_MC_ADVANCE_RIP();
3508 IEM_MC_END();
3509 }
3510 else
3511 {
3512 /*
3513 * Register, memory.
3514 */
3515 IEM_MC_BEGIN(2, 2);
3516 IEM_MC_ARG(uint128_t *, pDst, 0);
3517 IEM_MC_LOCAL(uint128_t, uSrc);
3518 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3520
3521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3523 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3524 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3525
3526 IEM_MC_PREPARE_SSE_USAGE();
3527 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3528 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3529
3530 IEM_MC_ADVANCE_RIP();
3531 IEM_MC_END();
3532 }
3533 return VINF_SUCCESS;
3534
3535 case 0: /* MMX */
3536 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3537 {
3538 /*
3539 * Register, register.
3540 */
3541 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3542 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3544 IEM_MC_BEGIN(2, 0);
3545 IEM_MC_ARG(uint64_t *, pDst, 0);
3546 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3547 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3548 IEM_MC_PREPARE_FPU_USAGE();
3549 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3550 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3551 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3552 IEM_MC_ADVANCE_RIP();
3553 IEM_MC_END();
3554 }
3555 else
3556 {
3557 /*
3558 * Register, memory.
3559 */
3560 IEM_MC_BEGIN(2, 2);
3561 IEM_MC_ARG(uint64_t *, pDst, 0);
3562 IEM_MC_LOCAL(uint64_t, uSrc);
3563 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3565
3566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3568 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3569 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3570
3571 IEM_MC_PREPARE_FPU_USAGE();
3572 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3573 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3574
3575 IEM_MC_ADVANCE_RIP();
3576 IEM_MC_END();
3577 }
3578 return VINF_SUCCESS;
3579
3580 default:
3581 return IEMOP_RAISE_INVALID_OPCODE();
3582 }
3583}
3584
3585
/** Opcode 0x0f 0x74.
 * PCMPEQB - packed byte equality compare; MMX and SSE forms share the
 * common full-to-full worker. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
3592
3593
/** Opcode 0x0f 0x75.
 * PCMPEQW - packed word equality compare; MMX and SSE forms share the
 * common full-to-full worker. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
3600
3601
/** Opcode 0x0f 0x76.
 * PCMPEQD - packed dword equality compare; MMX and SSE forms share the
 * common full-to-full worker.
 * NOTE(review): the identifier says "pcmped" (missing 'q') - kept as-is
 * since the opcode table references this name. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
3608
3609
/*
 * 0x0f 0x77 .. 0x0f 0x7d: every entry below is either a decoder stub or an
 * invalid-opcode marker.
 */

/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */
/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7c - invalid */
/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);

/* Opcode 0x0f 0x7d - invalid */
/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0x7d - invalid */
/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3652
3653
/** Opcode 0x0f 0x7e.
 * MOVD/MOVQ - store direction of 0x0f 0x6e: MMX or XMM register to general
 * register or memory, with REX.W selecting the 64-bit width.
 * NOTE(review): despite the name, the F3h form (movq Vq,Wq) is not handled
 * here - it falls into the invalid-opcode default; confirm intentional. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
            else
                IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
            else
                IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
                if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3770
3771
/** Opcode 0x0f 0x7f.
 * Store direction of 0x0f 0x6f: MOVQ Qq,Pq (no prefix, MMX), MOVDQA Wdq,Vdq
 * (66h) and MOVDQU Wdq,Vdq (F3h).  Only the aligned (movdqa) memory store
 * enforces 16-byte alignment. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true; /* fall thru - aligned and unaligned share the code below */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 0);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
                IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
                                      ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3871
3872
3873
/** Opcode 0x0f 0x80 - jo Jv (jump near if overflow, OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();                /* Two-byte Jcc with rel16/32 displacement requires a 386+. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK prefix is not allowed. */

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);             /* Taken: RIP-relative jump. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();                   /* Not taken: step past the instruction. */
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3908
3909
/** Opcode 0x0f 0x81 - jno Jv (jump near if not overflow, OF=0). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Branch sense inverted vs. jo: fall through when OF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3944
3945
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv (jump near if carry/below, CF=1). */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* Taken when CF=1. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3980
3981
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv (jump near if not carry/above or equal, CF=0). */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Branch sense inverted vs. jc: fall through when CF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4016
4017
/** Opcode 0x0f 0x84 - je/jz Jv (jump near if equal/zero, ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* Taken when ZF=1. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4052
4053
/** Opcode 0x0f 0x85 - jne/jnz Jv (jump near if not equal/not zero, ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Branch sense inverted vs. je: fall through when ZF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4088
4089
/** Opcode 0x0f 0x86 - jbe/jna Jv (jump near if below or equal, CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* Taken when CF=1 or ZF=1. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4124
4125
/** Opcode 0x0f 0x87 - jnbe/ja Jv (jump near if above, CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Branch sense inverted vs. jbe: fall through when CF or ZF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4160
4161
/** Opcode 0x0f 0x88 - js Jv (jump near if sign, SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* Taken when SF=1. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4196
4197
/** Opcode 0x0f 0x89 - jns Jv (jump near if not sign, SF=0). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Branch sense inverted vs. js: fall through when SF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4232
4233
/** Opcode 0x0f 0x8a - jp Jv (jump near if parity even, PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* Taken when PF=1. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4268
4269
/** Opcode 0x0f 0x8b - jnp Jv (jump near if parity odd, PF=0). */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Branch sense inverted vs. jp: fall through when PF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4304
4305
/** Opcode 0x0f 0x8c - jl/jnge Jv (jump near if less, signed: SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* Taken when SF != OF. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4340
4341
/** Opcode 0x0f 0x8d - jnl/jge Jv (jump near if greater or equal, signed: SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Branch sense inverted vs. jl: fall through when SF != OF. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4376
4377
/** Opcode 0x0f 0x8e - jle/jng Jv (jump near if less or equal, signed: ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);     /* Taken when ZF=1 or SF != OF. */
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4412
4413
/** Opcode 0x0f 0x8f - jnle/jg Jv (jump near if greater, signed: ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Near branches default to 64-bit operand size in 64-bit mode. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: signed 16-bit displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Branch sense inverted vs. jle: fall through when ZF=1 or SF != OF. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: signed 32-bit displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4448
4449
/** Opcode 0x0f 0x90 - seto Eb (store 1 in the byte operand if OF=1, else 0). */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK prefix is not allowed. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on OF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4490
4491
/** Opcode 0x0f 0x91 - setno Eb (store 1 in the byte operand if OF=0, else 0). */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC(setno_Eb, "setno Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - values inverted vs. seto: 0 when OF set, 1 otherwise. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on OF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4532
4533
/** Opcode 0x0f 0x92 - setc Eb (store 1 in the byte operand if CF=1, else 0). */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC(setc_Eb, "setc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on CF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4574
4575
/** Opcode 0x0f 0x93 - setnc Eb (store 1 in the byte operand if CF=0, else 0). */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - values inverted vs. setc: 0 when CF set, 1 otherwise. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on CF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4616
4617
/** Opcode 0x0f 0x94 - sete Eb (store 1 in the byte operand if ZF=1, else 0). */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC(sete_Eb, "sete Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on ZF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4658
4659
/** Opcode 0x0f 0x95 - setne Eb (store 1 in the byte operand if ZF=0, else 0). */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC(setne_Eb, "setne Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - values inverted vs. sete: 0 when ZF set, 1 otherwise. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on ZF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4700
4701
/** Opcode 0x0f 0x96 - setbe Eb (store 1 in the byte operand if CF=1 or ZF=1, else 0). */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on the flags. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4742
4743
/** Opcode 0x0f 0x97 - setnbe Eb (store 1 in the byte operand if CF=0 and ZF=0, else 0). */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - values inverted vs. setbe: 0 when CF or ZF set, 1 otherwise. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on the flags. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4784
4785
/** Opcode 0x0f 0x98 - sets Eb (store 1 in the byte operand if SF=1, else 0). */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC(sets_Eb, "sets Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on SF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4826
4827
/** Opcode 0x0f 0x99 - setns Eb (store 1 in the byte operand if SF=0, else 0). */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC(setns_Eb, "setns Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - values inverted vs. sets: 0 when SF set, 1 otherwise. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on SF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4868
4869
/** Opcode 0x0f 0x9a - setp Eb (store 1 in the byte operand if PF=1, else 0). */
FNIEMOP_DEF(iemOp_setp_Eb)
{
    IEMOP_MNEMONIC(setp_Eb, "setp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on PF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4910
4911
/** Opcode 0x0f 0x9b - setnp Eb (store 1 in the byte operand if PF=0, else 0). */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - values inverted vs. setp: 0 when PF set, 1 otherwise. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on PF. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4952
4953
/** Opcode 0x0f 0x9c - setl Eb (store 1 in the byte operand if less, signed: SF != OF). */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC(setl_Eb, "setl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - the byte is written unconditionally, only its value depends on the flags. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* Must run before DONE_DECODING: consumes SIB/disp bytes. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4994
4995
/** Opcode 0x0f 0x9d.
 * SETNL/SETGE - store 1 in the byte destination when SF == OF (signed
 * "greater or equal"), otherwise store 0.  Inverse of SETL, so the
 * IF/ELSE store values are swapped relative to iemOp_setl_Eb. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5036
5037
/** Opcode 0x0f 0x9e.
 * SETLE/SETNG - store 1 in the byte destination when ZF is set or
 * SF != OF (signed "less or equal"), otherwise store 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC(setle_Eb, "setle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5078
5079
/** Opcode 0x0f 0x9f.
 * SETNLE/SETG - store 1 in the byte destination when ZF is clear and
 * SF == OF (signed "greater than"), otherwise store 0.  Inverse of
 * SETLE, so the IF/ELSE store values are swapped relative to it. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5120
5121
5122/**
5123 * Common 'push segment-register' helper.
5124 */
5125FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
5126{
5127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5128 if (iReg < X86_SREG_FS)
5129 IEMOP_HLP_NO_64BIT();
5130 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5131
5132 switch (pVCpu->iem.s.enmEffOpSize)
5133 {
5134 case IEMMODE_16BIT:
5135 IEM_MC_BEGIN(0, 1);
5136 IEM_MC_LOCAL(uint16_t, u16Value);
5137 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
5138 IEM_MC_PUSH_U16(u16Value);
5139 IEM_MC_ADVANCE_RIP();
5140 IEM_MC_END();
5141 break;
5142
5143 case IEMMODE_32BIT:
5144 IEM_MC_BEGIN(0, 1);
5145 IEM_MC_LOCAL(uint32_t, u32Value);
5146 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
5147 IEM_MC_PUSH_U32_SREG(u32Value);
5148 IEM_MC_ADVANCE_RIP();
5149 IEM_MC_END();
5150 break;
5151
5152 case IEMMODE_64BIT:
5153 IEM_MC_BEGIN(0, 1);
5154 IEM_MC_LOCAL(uint64_t, u64Value);
5155 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
5156 IEM_MC_PUSH_U64(u64Value);
5157 IEM_MC_ADVANCE_RIP();
5158 IEM_MC_END();
5159 break;
5160 }
5161
5162 return VINF_SUCCESS;
5163}
5164
5165
/** Opcode 0x0f 0xa0.
 * PUSH FS - defers to the common segment-register push helper. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC(push_fs, "push fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
5174
5175
/** Opcode 0x0f 0xa1.
 * POP FS - deferred to a C implementation since segment loading involves
 * descriptor-table access and fault checks. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC(pop_fs, "pop fs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
}
5184
5185
/** Opcode 0x0f 0xa2.
 * CPUID - deferred to the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC(cpuid, "cpuid");
    IEMOP_HLP_MIN_486(); /* not all 486es. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
5194
5195
5196/**
5197 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
5198 * iemOp_bts_Ev_Gv.
5199 */
5200FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
5201{
5202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5203 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5204
5205 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5206 {
5207 /* register destination. */
5208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5209 switch (pVCpu->iem.s.enmEffOpSize)
5210 {
5211 case IEMMODE_16BIT:
5212 IEM_MC_BEGIN(3, 0);
5213 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5214 IEM_MC_ARG(uint16_t, u16Src, 1);
5215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5216
5217 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5218 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
5219 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5220 IEM_MC_REF_EFLAGS(pEFlags);
5221 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5222
5223 IEM_MC_ADVANCE_RIP();
5224 IEM_MC_END();
5225 return VINF_SUCCESS;
5226
5227 case IEMMODE_32BIT:
5228 IEM_MC_BEGIN(3, 0);
5229 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5230 IEM_MC_ARG(uint32_t, u32Src, 1);
5231 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5232
5233 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5234 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
5235 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5236 IEM_MC_REF_EFLAGS(pEFlags);
5237 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5238
5239 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5240 IEM_MC_ADVANCE_RIP();
5241 IEM_MC_END();
5242 return VINF_SUCCESS;
5243
5244 case IEMMODE_64BIT:
5245 IEM_MC_BEGIN(3, 0);
5246 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5247 IEM_MC_ARG(uint64_t, u64Src, 1);
5248 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5249
5250 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5251 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
5252 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5253 IEM_MC_REF_EFLAGS(pEFlags);
5254 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5255
5256 IEM_MC_ADVANCE_RIP();
5257 IEM_MC_END();
5258 return VINF_SUCCESS;
5259
5260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5261 }
5262 }
5263 else
5264 {
5265 /* memory destination. */
5266
5267 uint32_t fAccess;
5268 if (pImpl->pfnLockedU16)
5269 fAccess = IEM_ACCESS_DATA_RW;
5270 else /* BT */
5271 fAccess = IEM_ACCESS_DATA_R;
5272
5273 /** @todo test negative bit offsets! */
5274 switch (pVCpu->iem.s.enmEffOpSize)
5275 {
5276 case IEMMODE_16BIT:
5277 IEM_MC_BEGIN(3, 2);
5278 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5279 IEM_MC_ARG(uint16_t, u16Src, 1);
5280 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5282 IEM_MC_LOCAL(int16_t, i16AddrAdj);
5283
5284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5285 if (pImpl->pfnLockedU16)
5286 IEMOP_HLP_DONE_DECODING();
5287 else
5288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5289 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5290 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
5291 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
5292 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
5293 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
5294 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
5295 IEM_MC_FETCH_EFLAGS(EFlags);
5296
5297 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5298 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5299 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5300 else
5301 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5302 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5303
5304 IEM_MC_COMMIT_EFLAGS(EFlags);
5305 IEM_MC_ADVANCE_RIP();
5306 IEM_MC_END();
5307 return VINF_SUCCESS;
5308
5309 case IEMMODE_32BIT:
5310 IEM_MC_BEGIN(3, 2);
5311 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5312 IEM_MC_ARG(uint32_t, u32Src, 1);
5313 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5314 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5315 IEM_MC_LOCAL(int32_t, i32AddrAdj);
5316
5317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5318 if (pImpl->pfnLockedU16)
5319 IEMOP_HLP_DONE_DECODING();
5320 else
5321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5322 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5323 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
5324 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
5325 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
5326 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
5327 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
5328 IEM_MC_FETCH_EFLAGS(EFlags);
5329
5330 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5331 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5332 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5333 else
5334 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5335 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5336
5337 IEM_MC_COMMIT_EFLAGS(EFlags);
5338 IEM_MC_ADVANCE_RIP();
5339 IEM_MC_END();
5340 return VINF_SUCCESS;
5341
5342 case IEMMODE_64BIT:
5343 IEM_MC_BEGIN(3, 2);
5344 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5345 IEM_MC_ARG(uint64_t, u64Src, 1);
5346 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5348 IEM_MC_LOCAL(int64_t, i64AddrAdj);
5349
5350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5351 if (pImpl->pfnLockedU16)
5352 IEMOP_HLP_DONE_DECODING();
5353 else
5354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5355 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5356 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
5357 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
5358 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
5359 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5360 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5361 IEM_MC_FETCH_EFLAGS(EFlags);
5362
5363 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5364 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5365 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5366 else
5367 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5368 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5369
5370 IEM_MC_COMMIT_EFLAGS(EFlags);
5371 IEM_MC_ADVANCE_RIP();
5372 IEM_MC_END();
5373 return VINF_SUCCESS;
5374
5375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5376 }
5377 }
5378}
5379
5380
/** Opcode 0x0f 0xa3.
 * BT Ev,Gv - read-only bit test via the common bit-op worker. */
FNIEMOP_DEF(iemOp_bt_Ev_Gv)
{
    IEMOP_MNEMONIC(bt_Ev_Gv, "bt  Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
}
5388
5389
5390/**
5391 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
5392 */
5393FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5394{
5395 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5396 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5397
5398 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5399 {
5400 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5402
5403 switch (pVCpu->iem.s.enmEffOpSize)
5404 {
5405 case IEMMODE_16BIT:
5406 IEM_MC_BEGIN(4, 0);
5407 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5408 IEM_MC_ARG(uint16_t, u16Src, 1);
5409 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5410 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5411
5412 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5413 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5414 IEM_MC_REF_EFLAGS(pEFlags);
5415 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5416
5417 IEM_MC_ADVANCE_RIP();
5418 IEM_MC_END();
5419 return VINF_SUCCESS;
5420
5421 case IEMMODE_32BIT:
5422 IEM_MC_BEGIN(4, 0);
5423 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5424 IEM_MC_ARG(uint32_t, u32Src, 1);
5425 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5426 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5427
5428 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5429 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5430 IEM_MC_REF_EFLAGS(pEFlags);
5431 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5432
5433 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5434 IEM_MC_ADVANCE_RIP();
5435 IEM_MC_END();
5436 return VINF_SUCCESS;
5437
5438 case IEMMODE_64BIT:
5439 IEM_MC_BEGIN(4, 0);
5440 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5441 IEM_MC_ARG(uint64_t, u64Src, 1);
5442 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5443 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5444
5445 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5446 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5447 IEM_MC_REF_EFLAGS(pEFlags);
5448 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5449
5450 IEM_MC_ADVANCE_RIP();
5451 IEM_MC_END();
5452 return VINF_SUCCESS;
5453
5454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5455 }
5456 }
5457 else
5458 {
5459 switch (pVCpu->iem.s.enmEffOpSize)
5460 {
5461 case IEMMODE_16BIT:
5462 IEM_MC_BEGIN(4, 2);
5463 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5464 IEM_MC_ARG(uint16_t, u16Src, 1);
5465 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5466 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5468
5469 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5470 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5471 IEM_MC_ASSIGN(cShiftArg, cShift);
5472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5473 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5474 IEM_MC_FETCH_EFLAGS(EFlags);
5475 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5476 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5477
5478 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5479 IEM_MC_COMMIT_EFLAGS(EFlags);
5480 IEM_MC_ADVANCE_RIP();
5481 IEM_MC_END();
5482 return VINF_SUCCESS;
5483
5484 case IEMMODE_32BIT:
5485 IEM_MC_BEGIN(4, 2);
5486 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5487 IEM_MC_ARG(uint32_t, u32Src, 1);
5488 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5489 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5491
5492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5493 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5494 IEM_MC_ASSIGN(cShiftArg, cShift);
5495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5496 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5497 IEM_MC_FETCH_EFLAGS(EFlags);
5498 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5499 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5500
5501 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5502 IEM_MC_COMMIT_EFLAGS(EFlags);
5503 IEM_MC_ADVANCE_RIP();
5504 IEM_MC_END();
5505 return VINF_SUCCESS;
5506
5507 case IEMMODE_64BIT:
5508 IEM_MC_BEGIN(4, 2);
5509 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5510 IEM_MC_ARG(uint64_t, u64Src, 1);
5511 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5512 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5514
5515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5516 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5517 IEM_MC_ASSIGN(cShiftArg, cShift);
5518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5519 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5520 IEM_MC_FETCH_EFLAGS(EFlags);
5521 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5522 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5523
5524 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5525 IEM_MC_COMMIT_EFLAGS(EFlags);
5526 IEM_MC_ADVANCE_RIP();
5527 IEM_MC_END();
5528 return VINF_SUCCESS;
5529
5530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5531 }
5532 }
5533}
5534
5535
5536/**
5537 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5538 */
5539FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5540{
5541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5542 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5543
5544 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5545 {
5546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5547
5548 switch (pVCpu->iem.s.enmEffOpSize)
5549 {
5550 case IEMMODE_16BIT:
5551 IEM_MC_BEGIN(4, 0);
5552 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5553 IEM_MC_ARG(uint16_t, u16Src, 1);
5554 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5555 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5556
5557 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5558 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5559 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5560 IEM_MC_REF_EFLAGS(pEFlags);
5561 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5562
5563 IEM_MC_ADVANCE_RIP();
5564 IEM_MC_END();
5565 return VINF_SUCCESS;
5566
5567 case IEMMODE_32BIT:
5568 IEM_MC_BEGIN(4, 0);
5569 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5570 IEM_MC_ARG(uint32_t, u32Src, 1);
5571 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5572 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5573
5574 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5575 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5576 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5577 IEM_MC_REF_EFLAGS(pEFlags);
5578 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5579
5580 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5581 IEM_MC_ADVANCE_RIP();
5582 IEM_MC_END();
5583 return VINF_SUCCESS;
5584
5585 case IEMMODE_64BIT:
5586 IEM_MC_BEGIN(4, 0);
5587 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5588 IEM_MC_ARG(uint64_t, u64Src, 1);
5589 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5590 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5591
5592 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5593 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5594 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5595 IEM_MC_REF_EFLAGS(pEFlags);
5596 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5597
5598 IEM_MC_ADVANCE_RIP();
5599 IEM_MC_END();
5600 return VINF_SUCCESS;
5601
5602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5603 }
5604 }
5605 else
5606 {
5607 switch (pVCpu->iem.s.enmEffOpSize)
5608 {
5609 case IEMMODE_16BIT:
5610 IEM_MC_BEGIN(4, 2);
5611 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5612 IEM_MC_ARG(uint16_t, u16Src, 1);
5613 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5614 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5615 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5616
5617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5619 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5620 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5621 IEM_MC_FETCH_EFLAGS(EFlags);
5622 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5623 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5624
5625 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5626 IEM_MC_COMMIT_EFLAGS(EFlags);
5627 IEM_MC_ADVANCE_RIP();
5628 IEM_MC_END();
5629 return VINF_SUCCESS;
5630
5631 case IEMMODE_32BIT:
5632 IEM_MC_BEGIN(4, 2);
5633 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5634 IEM_MC_ARG(uint32_t, u32Src, 1);
5635 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5636 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5638
5639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5641 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5642 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5643 IEM_MC_FETCH_EFLAGS(EFlags);
5644 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5645 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5646
5647 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5648 IEM_MC_COMMIT_EFLAGS(EFlags);
5649 IEM_MC_ADVANCE_RIP();
5650 IEM_MC_END();
5651 return VINF_SUCCESS;
5652
5653 case IEMMODE_64BIT:
5654 IEM_MC_BEGIN(4, 2);
5655 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5656 IEM_MC_ARG(uint64_t, u64Src, 1);
5657 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5658 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5660
5661 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5663 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5664 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5665 IEM_MC_FETCH_EFLAGS(EFlags);
5666 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5667 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5668
5669 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5670 IEM_MC_COMMIT_EFLAGS(EFlags);
5671 IEM_MC_ADVANCE_RIP();
5672 IEM_MC_END();
5673 return VINF_SUCCESS;
5674
5675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5676 }
5677 }
5678}
5679
5680
5681
/** Opcode 0x0f 0xa4.
 * SHLD Ev,Gv,Ib - double-precision shift left, immediate count. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
5689
5690
/** Opcode 0x0f 0xa5.
 * SHLD Ev,Gv,CL - double-precision shift left, count in CL. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
5698
5699
/** Opcode 0x0f 0xa8.
 * PUSH GS - defers to the common segment-register push helper. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC(push_gs, "push gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
5708
5709
/** Opcode 0x0f 0xa9.
 * POP GS - deferred to a C implementation since segment loading involves
 * descriptor-table access and fault checks. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC(pop_gs, "pop gs");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
}
5718
5719
5720/** Opcode 0x0f 0xaa. */
5721FNIEMOP_STUB(iemOp_rsm);
5722//IEMOP_HLP_MIN_386();
5723
5724
/** Opcode 0x0f 0xab.
 * BTS Ev,Gv - bit test and set via the common bit-op worker. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
5732
5733
/** Opcode 0x0f 0xac.
 * SHRD Ev,Gv,Ib - double-precision shift right, immediate count. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
5741
5742
/** Opcode 0x0f 0xad.
 * SHRD Ev,Gv,CL - double-precision shift right, count in CL. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
    IEMOP_HLP_MIN_386();
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
5750
5751
/** Opcode 0x0f 0xae mem/0.
 * FXSAVE m512 - save FPU/MMX/SSE state; #UD without FXSR support.
 * State layout depends on the effective operand size, hence it is
 * passed on to the C implementation. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxsave, "fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5770
5771
/** Opcode 0x0f 0xae mem/1.
 * FXRSTOR m512 - restore FPU/MMX/SSE state; #UD without FXSR support.
 * Mirrors iemOp_Grp15_fxsave, deferring to the C implementation. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5790
5791
5792/** Opcode 0x0f 0xae mem/2. */
5793FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5794
5795/** Opcode 0x0f 0xae mem/3. */
5796FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5797
5798/** Opcode 0x0f 0xae mem/4. */
5799FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5800
5801/** Opcode 0x0f 0xae mem/5. */
5802FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5803
5804/** Opcode 0x0f 0xae mem/6. */
5805FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5806
5807/** Opcode 0x0f 0xae mem/7. */
5808FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5809
5810
5811/** Opcode 0x0f 0xae 11b/5. */
5812FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5813{
5814 RT_NOREF_PV(bRm);
5815 IEMOP_MNEMONIC(lfence, "lfence");
5816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5817 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5818 return IEMOP_RAISE_INVALID_OPCODE();
5819
5820 IEM_MC_BEGIN(0, 0);
5821 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5822 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5823 else
5824 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5825 IEM_MC_ADVANCE_RIP();
5826 IEM_MC_END();
5827 return VINF_SUCCESS;
5828}
5829
5830
5831/** Opcode 0x0f 0xae 11b/6. */
5832FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5833{
5834 RT_NOREF_PV(bRm);
5835 IEMOP_MNEMONIC(mfence, "mfence");
5836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5837 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5838 return IEMOP_RAISE_INVALID_OPCODE();
5839
5840 IEM_MC_BEGIN(0, 0);
5841 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5842 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5843 else
5844 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5845 IEM_MC_ADVANCE_RIP();
5846 IEM_MC_END();
5847 return VINF_SUCCESS;
5848}
5849
5850
5851/** Opcode 0x0f 0xae 11b/7. */
5852FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5853{
5854 RT_NOREF_PV(bRm);
5855 IEMOP_MNEMONIC(sfence, "sfence");
5856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5857 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5858 return IEMOP_RAISE_INVALID_OPCODE();
5859
5860 IEM_MC_BEGIN(0, 0);
5861 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5862 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5863 else
5864 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5865 IEM_MC_ADVANCE_RIP();
5866 IEM_MC_END();
5867 return VINF_SUCCESS;
5868}
5869
5870
5871/** Opcode 0xf3 0x0f 0xae 11b/0. */
5872FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5873
5874/** Opcode 0xf3 0x0f 0xae 11b/1. */
5875FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5876
5877/** Opcode 0xf3 0x0f 0xae 11b/2. */
5878FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5879
5880/** Opcode 0xf3 0x0f 0xae 11b/3. */
5881FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5882
5883
/** Opcode 0x0f 0xae.
 * Group 15 dispatcher: memory forms select fxsave/fxrstor/ldmxcsr/
 * stmxcsr/xsave/xrstor/xsaveopt/clflush by the ModR/M reg field;
 * register forms additionally dispatch on prefix (none -> fences,
 * F3 -> rd/wr fs/gs base). */
FNIEMOP_DEF(iemOp_Grp15)
{
    IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* memory forms - dispatch on the reg field only. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* register forms - prefix selects the sub-table. */
        switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no prefix: memory fences */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            case IEM_OP_PRF_REPZ: /* F3 prefix: fs/gs base access */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5943
5944
/** Opcode 0x0f 0xaf. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
    IEMOP_HLP_MIN_386(); /* Two-operand IMUL is a 386+ instruction. */
    /* SF, ZF, AF and PF are left undefined by this instruction form. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    /* Defer to the generic reg,reg/mem binary-operator decoder with the imul worker table. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5953
5954
/** Opcode 0x0f 0xb0. */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
    IEMOP_HLP_MIN_486(); /* CMPXCHG requires a 486 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register destination: cmpxchg r8, r8.  AL is referenced directly,
         * so the assembly worker updates it in place.
         */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Al,   1);
        IEM_MC_ARG(uint8_t,    u8Src,   2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        /* Select the plain or lock-prefixed worker. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Memory destination: cmpxchg [mem8], r8.  AL is copied into a local,
         * handed to the worker by reference, and written back afterwards.
         */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al,  1);
        IEM_MC_ARG(uint8_t,   u8Src,  2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* Write the (possibly updated) accumulator back. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6013
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
    IEMOP_HLP_MIN_486(); /* CMPXCHG requires a 486 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register destination; one IEM_MC block per effective operand size.
         * The accumulator (AX/EAX/RAX) is referenced directly so the worker
         * updates it in place.
         */
        IEMOP_HLP_DONE_DECODING();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax,  1);
                IEM_MC_ARG(uint16_t,   u16Src,  2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t,   u32Src,  2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear the upper halves of the 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t,   u64Src,  2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory destination; the accumulator goes through a local which is
         * stored back after the worker returns.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax,  1);
                IEM_MC_ARG(uint16_t,   u16Src,  2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t,   u32Src,  2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t,   u64Src,  2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6207
6208
/**
 * Common worker for LSS/LFS/LGS (and similar far-pointer loads): reads a
 * far pointer (offset followed by a 16-bit selector) from memory and calls
 * iemCImpl_load_SReg_Greg to load the segment register and the general
 * register.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte (must denote a memory operand).
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;

    /* The offset width and the selector displacement follow the operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2); /* selector follows the 16-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4); /* selector follows the 32-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8); /* selector follows the 64-bit offset */
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6270
6271
6272/** Opcode 0x0f 0xb2. */
6273FNIEMOP_DEF(iemOp_lss_Gv_Mp)
6274{
6275 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
6276 IEMOP_HLP_MIN_386();
6277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6279 return IEMOP_RAISE_INVALID_OPCODE();
6280 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
6281}
6282
6283
/** Opcode 0x0f 0xb3. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    /* Defer to the common bit-operation worker with the btr implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
6291
6292
6293/** Opcode 0x0f 0xb4. */
6294FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
6295{
6296 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
6297 IEMOP_HLP_MIN_386();
6298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6299 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6300 return IEMOP_RAISE_INVALID_OPCODE();
6301 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
6302}
6303
6304
6305/** Opcode 0x0f 0xb5. */
6306FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
6307{
6308 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
6309 IEMOP_HLP_MIN_386();
6310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6311 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6312 return IEMOP_RAISE_INVALID_OPCODE();
6313 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
6314}
6315
6316
/** Opcode 0x0f 0xb6. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVZX is a 386+ instruction. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Zero-extend a byte register into the destination of the effective size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6410
6411
/** Opcode 0x0f 0xb7. */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
    IEMOP_HLP_MIN_386(); /* MOVZX is a 386+ instruction. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Only two destination widths: 32-bit covers the 16/32-bit modes (see @todo above). */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6480
6481
/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF); stubbed as \#UD. */
FNIEMOP_UD_STUB(iemOp_jmpe);
/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev; not implemented yet. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev);
6486
6487
/** Opcode 0x0f 0xb9. */
FNIEMOP_DEF(iemOp_Grp10)
{
    /* Group 10 is reserved; it always raises an invalid-opcode exception here. */
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
6494
6495
/**
 * Opcode 0x0f 0xba - group 8: BT/BTS/BTR/BTC Ev,Ib.
 *
 * The reg field of the ModR/M byte selects the operation (0-3 are invalid);
 * the bit index comes from an immediate byte and is masked to the operand
 * width.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC(bt_Ev_Ib,  "bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1); /* bit index masked to operand width */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes clear the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT only reads the destination; the modifying forms need read-write access. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
            fAccess = IEM_ACCESS_DATA_R;

        /** @todo test negative bit offsets! */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = the imm8 still to be fetched */
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
6667
6668
/** Opcode 0x0f 0xbb. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    /* Defer to the common bit-operation worker with the btc implementation table. */
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
6676
6677
/** Opcode 0x0f 0xbc. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    /* OF, SF, AF, PF and CF are left undefined by BSF. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
6686
6687
/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev; not implemented yet. */
FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev);
6690
6691
/** Opcode 0x0f 0xbd. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
    IEMOP_HLP_MIN_386(); /* 386+ instruction. */
    /* OF, SF, AF, PF and CF are left undefined by BSR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
6700
6701
/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev; not implemented yet. */
FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev);
6704
6705
/** Opcode 0x0f 0xbe. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
    IEMOP_HLP_MIN_386(); /* MOVSX is a 386+ instruction. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Sign-extend a byte register into the destination of the effective size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6799
6800
/** Opcode 0x0f 0xbf. */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    /* MOVSX Gv,Ew - sign-extend a 16-bit register/memory operand into a
       32-bit or 64-bit general purpose register (386+). */
    IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
    IEMOP_HLP_MIN_386();

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        tests for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            /* 16-bit and 32-bit effective operand sizes share this path,
               producing a 32-bit result (see the @todo above). */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6869
6870
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    /* XADD Eb,Gb (486+): exchange and add.  The worker gets references to
       both operands; afterwards the source register holds the original
       destination value (memory form writes that copy back below). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_MIN_486();
    IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: a LOCK prefix is invalid here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        /* Operate on a local copy of the register so the worker's exchange
           result can be committed to the GREG after the memory commit. */
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6929
6930
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    /* XADD Ev,Gv (486+): exchange and add, word/dword/qword flavours.
       Same structure as the byte variant above, switched on the effective
       operand size. */
    IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
    IEMOP_HLP_MIN_486();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: a LOCK prefix is invalid here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit register writes zero the upper halves of both
                   registers in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                /* Work on a local register copy; committed after the memory
                   operand (LOCK variant used when the prefix is present). */
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7083
7084
/* 0x0f 0xc2 (CMPPS/CMPPD/CMPSS/CMPSD) - not implemented yet, stubbed. */
/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
7093
7094
/** Opcode 0x0f 0xc3. */
FNIEMOP_DEF(iemOp_movnti_My_Gy)
{
    /* MOVNTI My,Gy (SSE2): non-temporal store of a general register to
       memory.  The non-temporal hint itself is not modelled here; the store
       is performed as a plain memory write. */
    IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the register -> memory form makes sense, assuming #UD for the other form. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Feature check after decoding so prefix/decode faults keep
                   their usual priority over the #UD raised here. */
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
                    return IEMOP_RAISE_INVALID_OPCODE();

                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_16BIT:
                /** @todo check this form. */
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
        return IEMOP_RAISE_INVALID_OPCODE();
    return VINF_SUCCESS;
}
/* Opcode 0x66 0x0f 0xc3 - invalid */
/* Opcode 0xf3 0x0f 0xc3 - invalid */
/* Opcode 0xf2 0x0f 0xc3 - invalid */

/* 0x0f 0xc4..0xc6 (PINSRW/PEXTRW/SHUFPS/SHUFPD) - not implemented yet, stubbed. */
/** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
/* Opcode 0xf3 0x0f 0xc4 - invalid */
/* Opcode 0xf2 0x0f 0xc4 - invalid */

/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
/* Opcode 0xf3 0x0f 0xc5 - invalid */
/* Opcode 0xf2 0x0f 0xc5 - invalid */

/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
/* Opcode 0xf3 0x0f 0xc6 - invalid */
/* Opcode 0xf2 0x0f 0xc6 - invalid */
7172
7173
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    /* CMPXCHG8B Mq: compare EDX:EAX with the 64-bit memory operand; on
       match store ECX:EBX and set ZF, otherwise clear ZF.  Honours LOCK
       via the _locked worker. */
    IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) write back what the worker left in u64EaxEdx -
       presumably the current memory value; verify against iemAImpl_cmpxchg8b. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7218
7219
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
{
    /* CMPXCHG16B Mdq: 128-bit compare-and-exchange with RDX:RAX / RCX:RBX.
       Requires CPUID.CX16 (else #UD) and a 16-byte aligned operand (else #GP). */
    IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
    {
#if 0
        RT_NOREF(bRm);
        IEMOP_BITCH_ABOUT_STUB();
        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#else
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
        IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
        IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
        IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
        IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);

        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
        IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
        IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);

        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
        IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
        IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);

        IEM_MC_FETCH_EFLAGS(EFlags);
# ifdef RT_ARCH_AMD64
        /* Use the native cmpxchg16b worker when the host CPU has CX16. */
        if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
        {
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
        }
        else
# endif
        {
            /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
               accesses and not all atomic, which works fine in a UNI CPU guest
               configuration (ignoring DMA). If guest SMP is active we have no choice
               but to use a rendezvous callback here. Sigh. */
            if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
                IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
            else
            {
                IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
                /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
            }
        }

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
            IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
        IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();

        IEM_MC_END();
        return VINF_SUCCESS;
#endif
    }
    Log(("cmpxchg16b -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
7293
7294
/* Group 9 /6 and /7 forms - currently stubbed to raise #UD. */
/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
7309
7310
/** Opcode 0x0f 0xc7. */
FNIEMOP_DEF(iemOp_Grp9)
{
    /* Group 9 dispatcher: routes on ModR/M.reg to cmpxchg8b/16b (/1),
       rdrand and the VMX pointer instructions (/6, /7). */
    /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 2: case 3: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 1:
            /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
            /* Memory operand required; 0x66/0xf3 prefixes decode as #UD. */
            if (   (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
                || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
                return IEMOP_RAISE_INVALID_OPCODE();
            /* REX.W selects the 16-byte variant. */
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
            return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
        case 6:
            /* Register form is RDRAND; memory form selects a VMX pointer op
               depending on the 0x66/0xf3 prefix. */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
                case IEM_OP_PRF_SIZE_OP:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7:
            switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
            {
                case 0:
                case IEM_OP_PRF_REPZ:
                    return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
                default:
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7354
7355
7356/**
7357 * Common 'bswap register' helper.
7358 */
7359FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
7360{
7361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7362 switch (pVCpu->iem.s.enmEffOpSize)
7363 {
7364 case IEMMODE_16BIT:
7365 IEM_MC_BEGIN(1, 0);
7366 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7367 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
7368 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
7369 IEM_MC_ADVANCE_RIP();
7370 IEM_MC_END();
7371 return VINF_SUCCESS;
7372
7373 case IEMMODE_32BIT:
7374 IEM_MC_BEGIN(1, 0);
7375 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7376 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
7377 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7378 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
7379 IEM_MC_ADVANCE_RIP();
7380 IEM_MC_END();
7381 return VINF_SUCCESS;
7382
7383 case IEMMODE_64BIT:
7384 IEM_MC_BEGIN(1, 0);
7385 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7386 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
7387 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
7388 IEM_MC_ADVANCE_RIP();
7389 IEM_MC_END();
7390 return VINF_SUCCESS;
7391
7392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7393 }
7394}
7395
7396
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    /* BSWAP rAX/r8 (486+). */
    IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7407
7408
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    /* BSWAP rCX/r9 (486+). */
    IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7416
7417
7418/** Opcode 0x0f 0xca. */
7419FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7420{
7421 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r9");
7422 IEMOP_HLP_MIN_486();
7423 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7424}
7425
7426
7427/** Opcode 0x0f 0xcb. */
7428FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7429{
7430 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r9");
7431 IEMOP_HLP_MIN_486();
7432 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7433}
7434
7435
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    /* BSWAP rSP/r12 (486+). */
    IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7443
7444
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    /* BSWAP rBP/r13 (486+). */
    IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7452
7453
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    /* BSWAP rSI/r14 (486+). */
    IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7461
7462
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    /* BSWAP rDI/r15 (486+). */
    IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
    IEMOP_HLP_MIN_486();
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7470
7471
/* 0x0f 0xd0..0xd6 (ADDSUB*, PSRL*, PADDQ, PMULLW, MOVQ forms) - stubs. */
/* Opcode 0x0f 0xd0 - invalid */
/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
/* Opcode 0xf3 0x0f 0xd0 - invalid */
/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);

/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd1 - invalid */
/* Opcode 0xf2 0x0f 0xd1 - invalid */

/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd2 - invalid */
/* Opcode 0xf2 0x0f 0xd2 - invalid */

/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd3 - invalid */
/* Opcode 0xf2 0x0f 0xd3 - invalid */

/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd4 - invalid */
/* Opcode 0xf2 0x0f 0xd4 - invalid */

/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd5 - invalid */
/* Opcode 0xf2 0x0f 0xd5 - invalid */

/* Opcode 0x0f 0xd6 - invalid */
/** Opcode 0x66 0x0f 0xd6 - vmovq Wq, Vq */
FNIEMOP_STUB(iemOp_vmovq_Wq_Vq);
/** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */
FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq);
/** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */
FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq);
#if 0
/* Disabled draft (derived from the pmovmskb worker below); kept for reference.
   Fixed a garbled IEMOP_MNEMONIC token that would break the build if this
   block were ever enabled. */
FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq)
{
    /* Docs says register only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint128_t const *, pSrc, 1);
            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
            IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
            IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
            IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t const *, pSrc, 1);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
            IEM_MC_PREPARE_FPU_USAGE();
            IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
            IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
            IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
#endif
7564
7565
7566/** Opcode 0x0f 0xd7. */
7567FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7568{
7569 /* Docs says register only. */
7570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7571 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7572 return IEMOP_RAISE_INVALID_OPCODE();
7573
7574 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
7575 /** @todo testcase: Check that the instruction implicitly clears the high
7576 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
7577 * and opcode modifications are made to work with the whole width (not
7578 * just 128). */
7579 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7580 {
7581 case IEM_OP_PRF_SIZE_OP: /* SSE */
7582 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7583 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7584 IEM_MC_BEGIN(2, 0);
7585 IEM_MC_ARG(uint64_t *, pDst, 0);
7586 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7587 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7588 IEM_MC_PREPARE_SSE_USAGE();
7589 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7590 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7591 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7592 IEM_MC_ADVANCE_RIP();
7593 IEM_MC_END();
7594 return VINF_SUCCESS;
7595
7596 case 0: /* MMX */
7597 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7598 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7599 IEM_MC_BEGIN(2, 0);
7600 IEM_MC_ARG(uint64_t *, pDst, 0);
7601 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7602 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7603 IEM_MC_PREPARE_FPU_USAGE();
7604 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7605 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7606 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7607 IEM_MC_ADVANCE_RIP();
7608 IEM_MC_END();
7609 return VINF_SUCCESS;
7610
7611 default:
7612 return IEMOP_RAISE_INVALID_OPCODE();
7613 }
7614}
7615
7616
/* 0x0f 0xd8..0xe6 (saturating add/sub, min/max, logic, avg, shifts,
   multiplies, conversions) - not implemented yet, stubbed. */
/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xd8 - invalid */
/* Opcode 0xf2 0x0f 0xd8 - invalid */

/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xd9 - invalid */
/* Opcode 0xf2 0x0f 0xd9 - invalid */

/** Opcode 0x0f 0xda - pminub Pq, Qq */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xda - invalid */
/* Opcode 0xf2 0x0f 0xda - invalid */

/** Opcode 0x0f 0xdb - pand Pq, Qq */
FNIEMOP_STUB(iemOp_pand_Pq_Qq);
/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xdb - invalid */
/* Opcode 0xf2 0x0f 0xdb - invalid */

/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdc - invalid */
/* Opcode 0xf2 0x0f 0xdc - invalid */

/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdd - invalid */
/* Opcode 0xf2 0x0f 0xdd - invalid */

/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xde - invalid */
/* Opcode 0xf2 0x0f 0xde - invalid */

/** Opcode 0x0f 0xdf - pandn Pq, Qq */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */

/* Opcode 0x0f 0xe6 - invalid */
/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
7722
7723
7724/** Opcode 0x0f 0xe7. */
7725FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7726{
7727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7728 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7729 {
7730 /*
7731 * Register, memory.
7732 */
7733/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7734 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7735 {
7736
7737 case IEM_OP_PRF_SIZE_OP: /* SSE */
7738 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7739 IEM_MC_BEGIN(0, 2);
7740 IEM_MC_LOCAL(uint128_t, uSrc);
7741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7742
7743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7745 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7746 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7747
7748 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7749 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7750
7751 IEM_MC_ADVANCE_RIP();
7752 IEM_MC_END();
7753 break;
7754
7755 case 0: /* MMX */
7756 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7757 IEM_MC_BEGIN(0, 2);
7758 IEM_MC_LOCAL(uint64_t, uSrc);
7759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7760
7761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7763 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7764 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7765
7766 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7767 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7768
7769 IEM_MC_ADVANCE_RIP();
7770 IEM_MC_END();
7771 break;
7772
7773 default:
7774 return IEMOP_RAISE_INVALID_OPCODE();
7775 }
7776 }
7777 /* The register, register encoding is invalid. */
7778 else
7779 return IEMOP_RAISE_INVALID_OPCODE();
7780 return VINF_SUCCESS;
7781}
7782
7783
/*
 * Opcodes 0x0f 0xe8 thru 0x0f 0xee - MMX/SSE packed saturating arithmetic,
 * min/max and logical-or instructions.  Unimplemented decoder stubs.
 */
/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */

/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */

/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_STUB(iemOp_por_Pq_Qq);
/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */

/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */
7832
7833
/** Opcode 0x0f 0xef - pxor Pq,Qq (MMX) / pxor Vdq,Wdq (0x66 prefix, SSE2). */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC(pxor, "pxor");
    /* Both variants are handled by the common MMX/SSE2 full-register
       binary-operation worker, parameterized with the pxor implementation. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/*
 * Opcodes 0x0f 0xf0 thru 0x0f 0xfe - lddqu, MMX/SSE shifts, multiplies,
 * mask moves and packed add/sub instructions.  Unimplemented decoder stubs.
 */
/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */
/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_STUB(iemOp_pslld_Pq_Qq);
/** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq);
/** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq);
/** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */

/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq);
/** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xf8 - invalid */

/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq);
/** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xf9 - invalid */

/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfa - invalid */

/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq);
/** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfb - invalid */

/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq);
/** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfc - invalid */

/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq);
/** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx);
/* Opcode 0xf2 0x0f 0xfd - invalid */

/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq);
/** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */
FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W);
/* Opcode 0xf2 0x0f 0xfe - invalid */
7931
7932
/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        /* On Intel the decoder consumes a ModR/M byte (plus any addressing
           bytes it implies) before raising the exception; other vendors
           raise it immediately after the opcode. */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        /* Decode (but ignore) the effective address so the instruction
           length is advanced correctly; propagate any decode error. */
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    /* UD0 always raises an invalid-opcode exception. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
7950
7951
7952
/** Repeats a_fn four times, once for each of the four prefix columns
 * (no prefix, 0x66, 0xf3, 0xf2) in the decoding tables below. */
#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
7955
/**
 * The two-byte (0x0f) opcode decoder dispatch table.
 *
 * Four entries per opcode byte, one per mandatory-prefix column in the
 * order: no prefix, 0x66, 0xf3, 0xf2.  Lookup is
 * g_apfnTwoByteMap[bOpcode * 4 + idxPrefix] - see iemOp_2byteEscape.
 * Opcodes identical across all columns use the IEMOP_X4 repeater.
 */
IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ IEMOP_X4(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq),
    /* 0x69 */ IEMOP_X4(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq),
    /* 0x6a */ IEMOP_X4(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq),
    /* 0x6b */ IEMOP_X4(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq),
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ IEMOP_X4(iemOp_punpckhqdq_Vdq_Wdq),
    /* 0x6e */ IEMOP_X4(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey),
    /* 0x6f */ IEMOP_X4(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq),

    /* 0x70 */ IEMOP_X4(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib),
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */ IEMOP_X4(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq),
    /* 0x7f */ IEMOP_X4(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq),

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ IEMOP_X4(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq),
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
/* 256 opcodes x 4 prefix columns. */
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
8233/** @} */
8234
8235
8236/** @name One byte opcodes.
8237 *
8238 * @{
8239 */
8240
/** Opcode 0x00 - add Eb,Gb (byte r/m destination). */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - add Ev,Gv (word/dword/qword r/m destination). */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - add Gb,Eb (byte register destination). */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - add Gv,Ev (word/dword/qword register destination). */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - add al,Ib (immediate to AL). */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - add rAX,Iz (immediate to accumulator). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
8287
8288
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07 - pop es (not valid in 64-bit mode, see IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC(pop_es, "pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
8305
8306
/*
 * Opcodes 0x08..0x0d - OR family.  AF is marked undefined for the
 * verification runs (IEMOP_VERIFICATION_UNDEFINED_EFLAGS).
 */

/** Opcode 0x08 - or Eb,Gb. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09 - or Ev,Gv. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a - or Gb,Eb. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or Gv,Ev. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or al,Ib. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX,Iz. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
8367
8368
/** Opcode 0x0f - escape byte into the two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* One-time (strict builds only) sanity check that the two-byte table's
       four columns are laid out in the expected none/66h/F3h/F2h order,
       probing the 0x0f 0xbc (bsf/tzcnt) row. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    /* Dispatch on the second opcode byte; idxPrefix selects the
       none/66h/F3h/F2h column of the table. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
8391
/** Opcode 0x10 - ADC Eb,Gb: byte add-with-carry of general register into reg/mem operand. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
8398
8399
/** Opcode 0x11 - ADC Ev,Gv: word/dword/qword add-with-carry of general register into reg/mem operand. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
8406
8407
/** Opcode 0x12 - ADC Gb,Eb: byte add-with-carry of reg/mem operand into general register. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
8414
8415
/** Opcode 0x13 - ADC Gv,Ev: word/dword/qword add-with-carry of reg/mem operand into general register. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
8422
8423
/** Opcode 0x14 - ADC AL,Ib: add-with-carry immediate byte into AL. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
8430
8431
/** Opcode 0x15 - ADC rAX,Iz: add-with-carry operand-size immediate into AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
8438
8439
/** Opcode 0x16 - PUSH SS (via the common segment-register push worker). */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
8446
8447
/** Opcode 0x17 - POP SS; invalid in 64-bit mode, deferred to the C implementation. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
8456
8457
/** Opcode 0x18 - SBB Eb,Gb: byte subtract-with-borrow of general register from reg/mem operand. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
8464
8465
/** Opcode 0x19 - SBB Ev,Gv: word/dword/qword subtract-with-borrow of general register from reg/mem operand. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
8472
8473
/** Opcode 0x1a - SBB Gb,Eb: byte subtract-with-borrow of reg/mem operand from general register. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
8480
8481
/** Opcode 0x1b - SBB Gv,Ev: word/dword/qword subtract-with-borrow of reg/mem operand from general register. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
8488
8489
/** Opcode 0x1c - SBB AL,Ib: subtract-with-borrow immediate byte from AL. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
8496
8497
/** Opcode 0x1d - SBB rAX,Iz: subtract-with-borrow operand-size immediate from AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
8504
8505
/** Opcode 0x1e - PUSH DS (via the common segment-register push worker). */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
8512
8513
/** Opcode 0x1f - POP DS; invalid in 64-bit mode, deferred to the C implementation. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
8522
8523
/** Opcode 0x20 - AND Eb,Gb: byte AND of general register into reg/mem operand (AF is undefined after AND). */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
8531
8532
/** Opcode 0x21 - AND Ev,Gv: word/dword/qword AND of general register into reg/mem operand (AF is undefined after AND). */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
8540
8541
/** Opcode 0x22 - AND Gb,Eb: byte AND of reg/mem operand into general register (AF is undefined after AND). */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
8549
8550
/** Opcode 0x23 - AND Gv,Ev: word/dword/qword AND of reg/mem operand into general register (AF is undefined after AND). */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
8558
8559
/** Opcode 0x24 - AND AL,Ib: AND immediate byte into AL (AF is undefined after AND). */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}
8567
8568
/** Opcode 0x25 - AND rAX,Iz: AND operand-size immediate into AX/EAX/RAX (AF is undefined after AND). */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
8576
8577
/** Opcode 0x26 - ES segment override prefix: record the prefix and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    /* Continue decoding with the following byte via the one-byte table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8588
8589
/** Opcode 0x27 - DAA; invalid in 64-bit mode, OF is undefined, deferred to the C implementation. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
8599
8600
/** Opcode 0x28 - SUB Eb,Gb: byte subtract of general register from reg/mem operand. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
8607
8608
/** Opcode 0x29 - SUB Ev,Gv: word/dword/qword subtract of general register from reg/mem operand. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
8615
8616
/** Opcode 0x2a - SUB Gb,Eb: byte subtract of reg/mem operand from general register. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
8623
8624
/** Opcode 0x2b - SUB Gv,Ev: word/dword/qword subtract of reg/mem operand from general register. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
8631
8632
/** Opcode 0x2c - SUB AL,Ib: subtract immediate byte from AL. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
8639
8640
/** Opcode 0x2d - SUB rAX,Iz: subtract operand-size immediate from AX/EAX/RAX. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
8647
8648
/** Opcode 0x2e - CS segment override prefix: record the prefix and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    /* Continue decoding with the following byte via the one-byte table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8659
8660
/** Opcode 0x2f - DAS; invalid in 64-bit mode, OF is undefined, deferred to the C implementation. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
8670
8671
/** Opcode 0x30 - XOR Eb,Gb: byte XOR of general register into reg/mem operand (AF is undefined after XOR). */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
8679
8680
/** Opcode 0x31 - XOR Ev,Gv: word/dword/qword XOR of general register into reg/mem operand (AF is undefined after XOR). */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
8688
8689
/** Opcode 0x32 - XOR Gb,Eb: byte XOR of reg/mem operand into general register (AF is undefined after XOR). */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
8697
8698
/** Opcode 0x33 - XOR Gv,Ev: word/dword/qword XOR of reg/mem operand into general register (AF is undefined after XOR). */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
8706
8707
/** Opcode 0x34 - XOR AL,Ib: XOR immediate byte into AL (AF is undefined after XOR). */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
8715
8716
/** Opcode 0x35 - XOR rAX,Iz: XOR operand-size immediate into AX/EAX/RAX (AF is undefined after XOR). */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
8724
8725
/** Opcode 0x36 - SS segment override prefix: record the prefix and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    /* Continue decoding with the following byte via the one-byte table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8736
8737
/** Opcode 0x37 - AAA (ASCII adjust AL after addition); decoder stub, not implemented here yet. */
FNIEMOP_STUB(iemOp_aaa);
8740
8741
/** Opcode 0x38 - CMP Eb,Gb: byte compare of general register against reg/mem operand. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
8748
8749
/** Opcode 0x39 - CMP Ev,Gv: word/dword/qword compare of general register against reg/mem operand. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
8756
8757
/** Opcode 0x3a - CMP Gb,Eb: byte compare of reg/mem operand against general register. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
8764
8765
/** Opcode 0x3b - CMP Gv,Ev: word/dword/qword compare of reg/mem operand against general register. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
8772
8773
/** Opcode 0x3c - CMP AL,Ib: compare AL against immediate byte. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
8780
8781
/** Opcode 0x3d - CMP rAX,Iz: compare AX/EAX/RAX against operand-size immediate. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
8788
8789
/** Opcode 0x3e - DS segment override prefix: record the prefix and decode the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    /* Continue decoding with the following byte via the one-byte table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8800
8801
/** Opcode 0x3f - AAS (ASCII adjust AL after subtraction); decoder stub, not implemented here yet. */
FNIEMOP_STUB(iemOp_aas);
8804
8805/**
8806 * Common 'inc/dec/not/neg register' helper.
8807 */
8808FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8809{
8810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8811 switch (pVCpu->iem.s.enmEffOpSize)
8812 {
8813 case IEMMODE_16BIT:
8814 IEM_MC_BEGIN(2, 0);
8815 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8816 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8817 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8818 IEM_MC_REF_EFLAGS(pEFlags);
8819 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8820 IEM_MC_ADVANCE_RIP();
8821 IEM_MC_END();
8822 return VINF_SUCCESS;
8823
8824 case IEMMODE_32BIT:
8825 IEM_MC_BEGIN(2, 0);
8826 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8827 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8828 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8829 IEM_MC_REF_EFLAGS(pEFlags);
8830 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8831 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8832 IEM_MC_ADVANCE_RIP();
8833 IEM_MC_END();
8834 return VINF_SUCCESS;
8835
8836 case IEMMODE_64BIT:
8837 IEM_MC_BEGIN(2, 0);
8838 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8839 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8840 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8841 IEM_MC_REF_EFLAGS(pEFlags);
8842 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8843 IEM_MC_ADVANCE_RIP();
8844 IEM_MC_END();
8845 return VINF_SUCCESS;
8846 }
8847 return VINF_SUCCESS;
8848}
8849
8850
/** Opcode 0x40 - INC eAX; in 64-bit mode this byte is the REX prefix instead. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Continue decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
8869
8870
/** Opcode 0x41 - INC eCX; in 64-bit mode this byte is the REX.B prefix instead. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
8890
8891
/** Opcode 0x42 - INC eDX; in 64-bit mode this byte is the REX.X prefix instead. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
8911
8912
8913
/** Opcode 0x43 - INC eBX; in 64-bit mode this byte is the REX.BX prefix instead. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
8934
8935
/** Opcode 0x44 - INC eSP; in 64-bit mode this byte is the REX.R prefix instead. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
8955
8956
/** Opcode 0x45 - INC eBP; in 64-bit mode this byte is the REX.RB prefix instead. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
8977
8978
/** Opcode 0x46 - INC eSI; in 64-bit mode this byte is the REX.RX prefix instead. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
8999
9000
/** Opcode 0x47 - INC eDI; in 64-bit mode this byte is the REX.RBX prefix instead. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
9022
9023
/** Opcode 0x48 - DEC eAX; in 64-bit mode this byte is the REX.W prefix instead. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        /* REX.W changes the effective operand size, so recalculate it. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
9043
9044
/** Opcode 0x49 - DEC eCX; in 64-bit mode this byte is the REX.BW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
9065
9066
/** Opcode 0x4a - DEC eDX; in 64-bit mode this byte is the REX.XW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
9087
9088
/** Opcode 0x4b - DEC eBX; in 64-bit mode this byte is the REX.BXW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
9110
9111
/** Opcode 0x4c - DEC eSP; in 64-bit mode this byte is the REX.RW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
9132
9133
/** Opcode 0x4d - DEC eBP; in 64-bit mode this byte is the REX.RBW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
9155
9156
/** Opcode 0x4e - DEC eSI; in 64-bit mode this byte is the REX.RXW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
9178
9179
/** Opcode 0x4f - DEC eDI; in 64-bit mode this byte is the REX.RBXW prefix instead. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
9202
9203
9204/**
9205 * Common 'push register' helper.
9206 */
9207FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
9208{
9209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9210 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9211 {
9212 iReg |= pVCpu->iem.s.uRexB;
9213 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9214 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9215 }
9216
9217 switch (pVCpu->iem.s.enmEffOpSize)
9218 {
9219 case IEMMODE_16BIT:
9220 IEM_MC_BEGIN(0, 1);
9221 IEM_MC_LOCAL(uint16_t, u16Value);
9222 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
9223 IEM_MC_PUSH_U16(u16Value);
9224 IEM_MC_ADVANCE_RIP();
9225 IEM_MC_END();
9226 break;
9227
9228 case IEMMODE_32BIT:
9229 IEM_MC_BEGIN(0, 1);
9230 IEM_MC_LOCAL(uint32_t, u32Value);
9231 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
9232 IEM_MC_PUSH_U32(u32Value);
9233 IEM_MC_ADVANCE_RIP();
9234 IEM_MC_END();
9235 break;
9236
9237 case IEMMODE_64BIT:
9238 IEM_MC_BEGIN(0, 1);
9239 IEM_MC_LOCAL(uint64_t, u64Value);
9240 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
9241 IEM_MC_PUSH_U64(u64Value);
9242 IEM_MC_ADVANCE_RIP();
9243 IEM_MC_END();
9244 break;
9245 }
9246
9247 return VINF_SUCCESS;
9248}
9249
9250
/** Opcode 0x50 - PUSH rAX (rAX..r8 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
9257
9258
/** Opcode 0x51 - PUSH rCX (rCX..r9 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
9265
9266
/** Opcode 0x52 - PUSH rDX (rDX..r10 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
9273
9274
/** Opcode 0x53 - PUSH rBX (rBX..r11 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
9281
9282
9283/** Opcode 0x54. */
9284FNIEMOP_DEF(iemOp_push_eSP)
9285{
9286 IEMOP_MNEMONIC(push_rSP, "push rSP");
9287 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
9288 {
9289 IEM_MC_BEGIN(0, 1);
9290 IEM_MC_LOCAL(uint16_t, u16Value);
9291 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
9292 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
9293 IEM_MC_PUSH_U16(u16Value);
9294 IEM_MC_ADVANCE_RIP();
9295 IEM_MC_END();
9296 }
9297 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
9298}
9299
9300
/** Opcode 0x55 - PUSH rBP (rBP..r13 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
9307
9308
/** Opcode 0x56 - PUSH rSI (rSI..r14 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
9315
9316
/** Opcode 0x57 - PUSH rDI (rDI..r15 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
9323
9324
9325/**
9326 * Common 'pop register' helper.
9327 */
9328FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
9329{
9330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9331 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
9332 {
9333 iReg |= pVCpu->iem.s.uRexB;
9334 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
9335 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
9336 }
9337
9338 switch (pVCpu->iem.s.enmEffOpSize)
9339 {
9340 case IEMMODE_16BIT:
9341 IEM_MC_BEGIN(0, 1);
9342 IEM_MC_LOCAL(uint16_t *, pu16Dst);
9343 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
9344 IEM_MC_POP_U16(pu16Dst);
9345 IEM_MC_ADVANCE_RIP();
9346 IEM_MC_END();
9347 break;
9348
9349 case IEMMODE_32BIT:
9350 IEM_MC_BEGIN(0, 1);
9351 IEM_MC_LOCAL(uint32_t *, pu32Dst);
9352 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
9353 IEM_MC_POP_U32(pu32Dst);
9354 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
9355 IEM_MC_ADVANCE_RIP();
9356 IEM_MC_END();
9357 break;
9358
9359 case IEMMODE_64BIT:
9360 IEM_MC_BEGIN(0, 1);
9361 IEM_MC_LOCAL(uint64_t *, pu64Dst);
9362 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
9363 IEM_MC_POP_U64(pu64Dst);
9364 IEM_MC_ADVANCE_RIP();
9365 IEM_MC_END();
9366 break;
9367 }
9368
9369 return VINF_SUCCESS;
9370}
9371
9372
/** Opcode 0x58 - POP rAX (rAX..r8 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
9379
9380
/** Opcode 0x59 - POP rCX (rCX..r9 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
9387
9388
/** Opcode 0x5a - POP rDX (rDX..r10 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
9395
9396
/** Opcode 0x5b - POP rBX (rBX..r11 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
9403
9404
/** Opcode 0x5c - POP rSP.
 *
 * Popping into SP itself cannot use the common worker's reference-based
 * approach (the pop reads via the old SP, then the result replaces SP), so
 * the value is popped into a local and stored afterwards.  With REX.B the
 * encoding means POP r12 and is handed to the common worker.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
9452
9453
/** Opcode 0x5d - POP rBP (rBP..r13 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
9460
9461
/** Opcode 0x5e - POP rSI (rSI..r14 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
9468
9469
/** Opcode 0x5f - POP rDI (rDI..r15 with REX.B, handled by the common worker). */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
9476
9477
/** Opcode 0x60 - PUSHA/PUSHAD; 80186+, invalid in 64-bit mode, deferred to the
 *  16 or 32-bit C implementation according to the effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
9489
9490
/** Opcode 0x61 - POPA/POPAD; 80186+, invalid in 64-bit mode, deferred to the
 *  16 or 32-bit C implementation according to the effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC(popa, "popa");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
9502
9503
/** Opcode 0x62 - BOUND (80186+) / EVEX prefix; decoder stub, not implemented here yet. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
//      IEMOP_HLP_MIN_186();
9507
9508
/** Opcode 0x63 - ARPL Ew,Gw (non-64-bit modes only; in 64-bit mode this byte
 *  is MOVSXD, see iemOp_movsxd_Gv_Ev).  286+, not valid in real or V86 mode. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: map the destination word read/write, apply the worker,
           then commit both the memory operand and the flags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9558
9559
9560/** Opcode 0x63 - 64-bit mode (movsxd Gv,Ev).
 * Sign-extends a 32-bit source into a 64-bit destination register.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
9564FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
9565{
9566 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
9567
9568 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
9569 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9570
9571 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9572 {
9573 /*
9574 * Register to register: fetch dword, sign-extend, store qword.
9575 */
9576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9577 IEM_MC_BEGIN(0, 1);
9578 IEM_MC_LOCAL(uint64_t, u64Value);
9579 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9580 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9581 IEM_MC_ADVANCE_RIP();
9582 IEM_MC_END();
9583 }
9584 else
9585 {
9586 /*
9587 * We're loading a register from memory: dword load with sign-extension.
9588 */
9589 IEM_MC_BEGIN(0, 2);
9590 IEM_MC_LOCAL(uint64_t, u64Value);
9591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9594 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9595 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
9596 IEM_MC_ADVANCE_RIP();
9597 IEM_MC_END();
9598 }
9599 return VINF_SUCCESS;
9600}
9601
9602
9603/** Opcode 0x64 - FS segment override prefix.
 * Records the prefix and effective segment, then decodes the next opcode byte. */
9604FNIEMOP_DEF(iemOp_seg_FS)
9605{
9606 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
9607 IEMOP_HLP_MIN_386(); /* FS was introduced with the 386. */
9608
9609 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
9610 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
9611
9612 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9613 return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* Continue with the real opcode. */
9614}
9615
9616
9617/** Opcode 0x65 - GS segment override prefix.
 * Records the prefix and effective segment, then decodes the next opcode byte. */
9618FNIEMOP_DEF(iemOp_seg_GS)
9619{
9620 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
9621 IEMOP_HLP_MIN_386(); /* GS was introduced with the 386. */
9622
9623 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
9624 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
9625
9626 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9627 return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* Continue with the real opcode. */
9628}
9629
9630
9631/** Opcode 0x66 - operand-size override prefix.
 * Records the prefix, recalculates the effective operand size and decodes the
 * next opcode byte. */
9632FNIEMOP_DEF(iemOp_op_size)
9633{
9634 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
9635 IEMOP_HLP_MIN_386();
9636
9637 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
9638 iemRecalEffOpSize(pVCpu);
9639
9640 /* For the 4 entry opcode tables, the operand prefix doesn't count
9641 when REPZ or REPNZ are present. */
9642 if (pVCpu->iem.s.idxPrefix == 0)
9643 pVCpu->iem.s.idxPrefix = 1;
9644
9645 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9646 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9647}
9648
9649
9650/** Opcode 0x67 - address-size override prefix.
 * Toggles the effective address mode relative to the default mode, then
 * decodes the next opcode byte. */
9651FNIEMOP_DEF(iemOp_addr_size)
9652{
9653 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
9654 IEMOP_HLP_MIN_386();
9655
9656 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
9657 switch (pVCpu->iem.s.enmDefAddrMode)
9658 {
9659 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
9660 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
9661 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break; /* 64-bit default -> 32-bit, never 16-bit. */
9662 default: AssertFailed();
9663 }
9664
9665 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9666 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9667}
9668
9669
9670/** Opcode 0x68 - push Iz.
 * Pushes an immediate sized by the effective operand size; in 64-bit mode the
 * immediate is a sign-extended dword and the push is a qword. */
9671FNIEMOP_DEF(iemOp_push_Iz)
9672{
9673 IEMOP_MNEMONIC(push_Iz, "push Iz");
9674 IEMOP_HLP_MIN_186();
9675 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* Pushes default to 64-bit operand size in long mode. */
9676 switch (pVCpu->iem.s.enmEffOpSize)
9677 {
9678 case IEMMODE_16BIT:
9679 {
9680 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9682 IEM_MC_BEGIN(0,0);
9683 IEM_MC_PUSH_U16(u16Imm);
9684 IEM_MC_ADVANCE_RIP();
9685 IEM_MC_END();
9686 return VINF_SUCCESS;
9687 }
9688
9689 case IEMMODE_32BIT:
9690 {
9691 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9693 IEM_MC_BEGIN(0,0);
9694 IEM_MC_PUSH_U32(u32Imm);
9695 IEM_MC_ADVANCE_RIP();
9696 IEM_MC_END();
9697 return VINF_SUCCESS;
9698 }
9699
9700 case IEMMODE_64BIT:
9701 {
9702 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits. */
9703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9704 IEM_MC_BEGIN(0,0);
9705 IEM_MC_PUSH_U64(u64Imm);
9706 IEM_MC_ADVANCE_RIP();
9707 IEM_MC_END();
9708 return VINF_SUCCESS;
9709 }
9710
9711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9712 }
9713}
9714
9715
9716/** Opcode 0x69 - imul Gv,Ev,Iz.
 * Three-operand signed multiply with a full-size immediate.  The destination
 * is always a register, so a local temporary is multiplied in place and then
 * stored to the destination GPR. */
9717FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9718{
9719 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9720 IEMOP_HLP_MIN_186();
9721 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9722 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); /* Flags left undefined by IMUL. */
9723
9724 switch (pVCpu->iem.s.enmEffOpSize)
9725 {
9726 case IEMMODE_16BIT:
9727 {
9728 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9729 {
9730 /* register operand */
9731 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9733
9734 IEM_MC_BEGIN(3, 1);
9735 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9736 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9737 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9738 IEM_MC_LOCAL(uint16_t, u16Tmp);
9739
9740 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9741 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp); /* Multiply into the temporary, not the source reg. */
9742 IEM_MC_REF_EFLAGS(pEFlags);
9743 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9744 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9745
9746 IEM_MC_ADVANCE_RIP();
9747 IEM_MC_END();
9748 }
9749 else
9750 {
9751 /* memory operand */
9752 IEM_MC_BEGIN(3, 2);
9753 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9754 IEM_MC_ARG(uint16_t, u16Src, 1);
9755 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9756 IEM_MC_LOCAL(uint16_t, u16Tmp);
9757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9758
9759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate bytes following the ModR/M encoding. */
9760 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9761 IEM_MC_ASSIGN(u16Src, u16Imm);
9762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9763 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9764 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9765 IEM_MC_REF_EFLAGS(pEFlags);
9766 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9767 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9768
9769 IEM_MC_ADVANCE_RIP();
9770 IEM_MC_END();
9771 }
9772 return VINF_SUCCESS;
9773 }
9774
9775 case IEMMODE_32BIT:
9776 {
9777 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9778 {
9779 /* register operand */
9780 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9782
9783 IEM_MC_BEGIN(3, 1);
9784 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9785 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9786 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9787 IEM_MC_LOCAL(uint32_t, u32Tmp);
9788
9789 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9790 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9791 IEM_MC_REF_EFLAGS(pEFlags);
9792 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9793 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9794
9795 IEM_MC_ADVANCE_RIP();
9796 IEM_MC_END();
9797 }
9798 else
9799 {
9800 /* memory operand */
9801 IEM_MC_BEGIN(3, 2);
9802 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9803 IEM_MC_ARG(uint32_t, u32Src, 1);
9804 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9805 IEM_MC_LOCAL(uint32_t, u32Tmp);
9806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9807
9808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes following the ModR/M encoding. */
9809 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9810 IEM_MC_ASSIGN(u32Src, u32Imm);
9811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9812 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9813 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9814 IEM_MC_REF_EFLAGS(pEFlags);
9815 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9816 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9817
9818 IEM_MC_ADVANCE_RIP();
9819 IEM_MC_END();
9820 }
9821 return VINF_SUCCESS;
9822 }
9823
9824 case IEMMODE_64BIT:
9825 {
9826 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9827 {
9828 /* register operand */
9829 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits. */
9830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9831
9832 IEM_MC_BEGIN(3, 1);
9833 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9834 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9835 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9836 IEM_MC_LOCAL(uint64_t, u64Tmp);
9837
9838 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9839 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9840 IEM_MC_REF_EFLAGS(pEFlags);
9841 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9842 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9843
9844 IEM_MC_ADVANCE_RIP();
9845 IEM_MC_END();
9846 }
9847 else
9848 {
9849 /* memory operand */
9850 IEM_MC_BEGIN(3, 2);
9851 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9852 IEM_MC_ARG(uint64_t, u64Src, 1);
9853 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9854 IEM_MC_LOCAL(uint64_t, u64Tmp);
9855 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9856
9857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 bytes following the ModR/M encoding. */
9858 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9859 IEM_MC_ASSIGN(u64Src, u64Imm);
9860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9861 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9862 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9863 IEM_MC_REF_EFLAGS(pEFlags);
9864 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9865 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9866
9867 IEM_MC_ADVANCE_RIP();
9868 IEM_MC_END();
9869 }
9870 return VINF_SUCCESS;
9871 }
9872 }
9873 AssertFailedReturn(VERR_IEM_IPE_9);
9874}
9875
9876
9877/** Opcode 0x6a - push Ib.
 * Pushes a sign-extended byte immediate; push width follows the effective
 * operand size. */
9878FNIEMOP_DEF(iemOp_push_Ib)
9879{
9880 IEMOP_MNEMONIC(push_Ib, "push Ib");
9881 IEMOP_HLP_MIN_186();
9882 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* Signed, so the PUSH below sign-extends. */
9883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9884 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9885
9886 IEM_MC_BEGIN(0,0);
9887 switch (pVCpu->iem.s.enmEffOpSize)
9888 {
9889 case IEMMODE_16BIT:
9890 IEM_MC_PUSH_U16(i8Imm);
9891 break;
9892 case IEMMODE_32BIT:
9893 IEM_MC_PUSH_U32(i8Imm);
9894 break;
9895 case IEMMODE_64BIT:
9896 IEM_MC_PUSH_U64(i8Imm);
9897 break;
9898 /* NOTE(review): no default case here, unlike push_Iz which uses
9899 IEM_NOT_REACHED_DEFAULT_CASE_RET(); all three modes are covered,
9900 but consider adding it for consistency - TODO confirm. */
9901 }
9902 IEM_MC_ADVANCE_RIP();
9903 IEM_MC_END();
9904 return VINF_SUCCESS;
9905}
9903
9904
9905/** Opcode 0x6b - imul Gv,Ev,Ib.
 * Three-operand signed multiply with a sign-extended byte immediate;
 * otherwise structured exactly like opcode 0x69 (imul Gv,Ev,Iz). */
9906FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9907{
9908 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
9909 IEMOP_HLP_MIN_186();
9910 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9911 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); /* Flags left undefined by IMUL. */
9912
9913 switch (pVCpu->iem.s.enmEffOpSize)
9914 {
9915 case IEMMODE_16BIT:
9916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9917 {
9918 /* register operand */
9919 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9921
9922 IEM_MC_BEGIN(3, 1);
9923 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9924 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* Sign-extend the byte immediate. */
9925 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9926 IEM_MC_LOCAL(uint16_t, u16Tmp);
9927
9928 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9929 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp); /* Multiply into the temporary, not the source reg. */
9930 IEM_MC_REF_EFLAGS(pEFlags);
9931 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9932 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9933
9934 IEM_MC_ADVANCE_RIP();
9935 IEM_MC_END();
9936 }
9937 else
9938 {
9939 /* memory operand */
9940 IEM_MC_BEGIN(3, 2);
9941 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9942 IEM_MC_ARG(uint16_t, u16Src, 1);
9943 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9944 IEM_MC_LOCAL(uint16_t, u16Tmp);
9945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9946
9947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 byte following the ModR/M encoding. */
9948 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9949 IEM_MC_ASSIGN(u16Src, u16Imm);
9950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9951 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9952 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9953 IEM_MC_REF_EFLAGS(pEFlags);
9954 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9955 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9956
9957 IEM_MC_ADVANCE_RIP();
9958 IEM_MC_END();
9959 }
9960 return VINF_SUCCESS;
9961
9962 case IEMMODE_32BIT:
9963 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9964 {
9965 /* register operand */
9966 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9968
9969 IEM_MC_BEGIN(3, 1);
9970 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9971 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* Sign-extend the byte immediate. */
9972 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9973 IEM_MC_LOCAL(uint32_t, u32Tmp);
9974
9975 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9976 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9977 IEM_MC_REF_EFLAGS(pEFlags);
9978 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9979 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9980
9981 IEM_MC_ADVANCE_RIP();
9982 IEM_MC_END();
9983 }
9984 else
9985 {
9986 /* memory operand */
9987 IEM_MC_BEGIN(3, 2);
9988 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9989 IEM_MC_ARG(uint32_t, u32Src, 1);
9990 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9991 IEM_MC_LOCAL(uint32_t, u32Tmp);
9992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9993
9994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 byte following the ModR/M encoding. */
9995 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
9996 IEM_MC_ASSIGN(u32Src, u32Imm);
9997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9998 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9999 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
10000 IEM_MC_REF_EFLAGS(pEFlags);
10001 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
10002 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
10003
10004 IEM_MC_ADVANCE_RIP();
10005 IEM_MC_END();
10006 }
10007 return VINF_SUCCESS;
10008
10009 case IEMMODE_64BIT:
10010 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10011 {
10012 /* register operand */
10013 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10015
10016 IEM_MC_BEGIN(3, 1);
10017 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10018 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1); /* Sign-extend the byte immediate. */
10019 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10020 IEM_MC_LOCAL(uint64_t, u64Tmp);
10021
10022 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10023 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10024 IEM_MC_REF_EFLAGS(pEFlags);
10025 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10026 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10027
10028 IEM_MC_ADVANCE_RIP();
10029 IEM_MC_END();
10030 }
10031 else
10032 {
10033 /* memory operand */
10034 IEM_MC_BEGIN(3, 2);
10035 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10036 IEM_MC_ARG(uint64_t, u64Src, 1);
10037 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10038 IEM_MC_LOCAL(uint64_t, u64Tmp);
10039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10040
10041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = imm8 byte following the ModR/M encoding. */
10042 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
10043 IEM_MC_ASSIGN(u64Src, u64Imm);
10044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10045 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10046 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
10047 IEM_MC_REF_EFLAGS(pEFlags);
10048 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
10049 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
10050
10051 IEM_MC_ADVANCE_RIP();
10052 IEM_MC_END();
10053 }
10054 return VINF_SUCCESS;
10055 }
10056 AssertFailedReturn(VERR_IEM_IPE_8);
10057}
10058
10059
10060/** Opcode 0x6c - ins Yb,DX.
 * Byte string input from port DX; defers to the C implementation picked by
 * REP prefix and effective address mode.  The trailing 'false' argument is
 * presumably an fIoChecked flag - verify against the CIMPL signature. */
10061FNIEMOP_DEF(iemOp_insb_Yb_DX)
10062{
10063 IEMOP_HLP_MIN_186();
10064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10065 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* REPNZ treated like REPZ here. */
10066 {
10067 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
10068 switch (pVCpu->iem.s.enmEffAddrMode)
10069 {
10070 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
10071 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
10072 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
10073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10074 }
10075 }
10076 else
10077 {
10078 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
10079 switch (pVCpu->iem.s.enmEffAddrMode)
10080 {
10081 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
10082 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
10083 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
10084 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10085 }
10086 }
10087}
10088
10089
10090/** Opcode 0x6d - ins Yv,DX.
 * Word/dword string input from port DX; dispatches on REP prefix, effective
 * operand size (64-bit op size shares the 32-bit workers) and address mode. */
10091FNIEMOP_DEF(iemOp_inswd_Yv_DX)
10092{
10093 IEMOP_HLP_MIN_186();
10094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10095 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ)) /* REPNZ treated like REPZ here. */
10096 {
10097 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
10098 switch (pVCpu->iem.s.enmEffOpSize)
10099 {
10100 case IEMMODE_16BIT:
10101 switch (pVCpu->iem.s.enmEffAddrMode)
10102 {
10103 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
10104 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
10105 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
10106 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10107 }
10108 break;
10109 case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers (no 64-bit port I/O). */
10110 case IEMMODE_32BIT:
10111 switch (pVCpu->iem.s.enmEffAddrMode)
10112 {
10113 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
10114 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
10115 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
10116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10117 }
10118 break;
10119 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10120 }
10121 }
10122 else
10123 {
10124 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
10125 switch (pVCpu->iem.s.enmEffOpSize)
10126 {
10127 case IEMMODE_16BIT:
10128 switch (pVCpu->iem.s.enmEffAddrMode)
10129 {
10130 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
10131 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
10132 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
10133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10134 }
10135 break;
10136 case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers (no 64-bit port I/O). */
10137 case IEMMODE_32BIT:
10138 switch (pVCpu->iem.s.enmEffAddrMode)
10139 {
10140 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
10141 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
10142 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
10143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10144 }
10145 break;
10146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10147 }
10148 }
10149}
10150
10151
10152/** Opcode 0x6e - outs DX,Yb.
 * Byte string output to port DX; passes the effective segment since OUTS
 * reads from memory (segment-overridable), unlike INS. */
10153FNIEMOP_DEF(iemOp_outsb_Yb_DX)
10154{
10155 IEMOP_HLP_MIN_186();
10156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10157 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* REPNZ treated like REPZ here. */
10158 {
10159 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
10160 switch (pVCpu->iem.s.enmEffAddrMode)
10161 {
10162 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10163 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10164 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10166 }
10167 }
10168 else
10169 {
10170 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
10171 switch (pVCpu->iem.s.enmEffAddrMode)
10172 {
10173 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
10174 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
10175 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
10176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10177 }
10178 }
10179}
10180
10181
10182/** Opcode 0x6f - outs DX,Yv.
 * Word/dword string output to port DX; dispatches on REP prefix, effective
 * operand size (64-bit op size shares the 32-bit workers) and address mode. */
10183FNIEMOP_DEF(iemOp_outswd_Yv_DX)
10184{
10185 IEMOP_HLP_MIN_186();
10186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10187 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ)) /* REPNZ treated like REPZ here. */
10188 {
10189 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
10190 switch (pVCpu->iem.s.enmEffOpSize)
10191 {
10192 case IEMMODE_16BIT:
10193 switch (pVCpu->iem.s.enmEffAddrMode)
10194 {
10195 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10196 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10197 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10199 }
10200 break;
10201 case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers (no 64-bit port I/O). */
10202 case IEMMODE_32BIT:
10203 switch (pVCpu->iem.s.enmEffAddrMode)
10204 {
10205 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10206 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10207 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10209 }
10210 break;
10211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10212 }
10213 }
10214 else
10215 {
10216 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
10217 switch (pVCpu->iem.s.enmEffOpSize)
10218 {
10219 case IEMMODE_16BIT:
10220 switch (pVCpu->iem.s.enmEffAddrMode)
10221 {
10222 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
10223 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
10224 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
10225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10226 }
10227 break;
10228 case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers (no 64-bit port I/O). */
10229 case IEMMODE_32BIT:
10230 switch (pVCpu->iem.s.enmEffAddrMode)
10231 {
10232 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
10233 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
10234 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
10235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10236 }
10237 break;
10238 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10239 }
10240 }
10241}
10242
10243
10244/** Opcode 0x70 - jo Jb: jump short if overflow (OF=1). */
10245FNIEMOP_DEF(iemOp_jo_Jb)
10246{
10247 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
10248 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10250 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10251
10252 IEM_MC_BEGIN(0, 0);
10253 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10254 IEM_MC_REL_JMP_S8(i8Imm);
10255 } IEM_MC_ELSE() {
10256 IEM_MC_ADVANCE_RIP();
10257 } IEM_MC_ENDIF();
10258 IEM_MC_END();
10259 return VINF_SUCCESS;
10260}
10261
10262
10263/** Opcode 0x71 - jno Jb: jump short if not overflow (OF=0); inverted branch order. */
10264FNIEMOP_DEF(iemOp_jno_Jb)
10265{
10266 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
10267 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10269 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10270
10271 IEM_MC_BEGIN(0, 0);
10272 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
10273 IEM_MC_ADVANCE_RIP(); /* OF set -> fall through. */
10274 } IEM_MC_ELSE() {
10275 IEM_MC_REL_JMP_S8(i8Imm); /* OF clear -> take branch. */
10276 } IEM_MC_ENDIF();
10277 IEM_MC_END();
10278 return VINF_SUCCESS;
10279}
10280
10281/** Opcode 0x72 - jc/jb/jnae Jb: jump short if carry (CF=1). */
10282FNIEMOP_DEF(iemOp_jc_Jb)
10283{
10284 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
10285 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10287 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10288
10289 IEM_MC_BEGIN(0, 0);
10290 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10291 IEM_MC_REL_JMP_S8(i8Imm);
10292 } IEM_MC_ELSE() {
10293 IEM_MC_ADVANCE_RIP();
10294 } IEM_MC_ENDIF();
10295 IEM_MC_END();
10296 return VINF_SUCCESS;
10297}
10298
10299
10300/** Opcode 0x73 - jnc/jnb/jae Jb: jump short if not carry (CF=0); inverted branch order. */
10301FNIEMOP_DEF(iemOp_jnc_Jb)
10302{
10303 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
10304 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10306 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10307
10308 IEM_MC_BEGIN(0, 0);
10309 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
10310 IEM_MC_ADVANCE_RIP(); /* CF set -> fall through. */
10311 } IEM_MC_ELSE() {
10312 IEM_MC_REL_JMP_S8(i8Imm); /* CF clear -> take branch. */
10313 } IEM_MC_ENDIF();
10314 IEM_MC_END();
10315 return VINF_SUCCESS;
10316}
10317
10318
10319/** Opcode 0x74 - je/jz Jb: jump short if equal/zero (ZF=1). */
10320FNIEMOP_DEF(iemOp_je_Jb)
10321{
10322 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
10323 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10325 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10326
10327 IEM_MC_BEGIN(0, 0);
10328 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10329 IEM_MC_REL_JMP_S8(i8Imm);
10330 } IEM_MC_ELSE() {
10331 IEM_MC_ADVANCE_RIP();
10332 } IEM_MC_ENDIF();
10333 IEM_MC_END();
10334 return VINF_SUCCESS;
10335}
10336
10337
10338/** Opcode 0x75 - jne/jnz Jb: jump short if not equal/not zero (ZF=0); inverted branch order. */
10339FNIEMOP_DEF(iemOp_jne_Jb)
10340{
10341 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
10342 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10344 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10345
10346 IEM_MC_BEGIN(0, 0);
10347 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10348 IEM_MC_ADVANCE_RIP(); /* ZF set -> fall through. */
10349 } IEM_MC_ELSE() {
10350 IEM_MC_REL_JMP_S8(i8Imm); /* ZF clear -> take branch. */
10351 } IEM_MC_ENDIF();
10352 IEM_MC_END();
10353 return VINF_SUCCESS;
10354}
10355
10356
10357/** Opcode 0x76 - jbe/jna Jb: jump short if below or equal (CF=1 or ZF=1). */
10358FNIEMOP_DEF(iemOp_jbe_Jb)
10359{
10360 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
10361 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10363 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10364
10365 IEM_MC_BEGIN(0, 0);
10366 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10367 IEM_MC_REL_JMP_S8(i8Imm);
10368 } IEM_MC_ELSE() {
10369 IEM_MC_ADVANCE_RIP();
10370 } IEM_MC_ENDIF();
10371 IEM_MC_END();
10372 return VINF_SUCCESS;
10373}
10374
10375
10376/** Opcode 0x77 - ja/jnbe Jb: jump short if above (CF=0 and ZF=0); inverted branch order. */
10377FNIEMOP_DEF(iemOp_jnbe_Jb)
10378{
10379 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
10380 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10382 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10383
10384 IEM_MC_BEGIN(0, 0);
10385 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
10386 IEM_MC_ADVANCE_RIP(); /* CF or ZF set -> fall through. */
10387 } IEM_MC_ELSE() {
10388 IEM_MC_REL_JMP_S8(i8Imm); /* Neither set -> take branch. */
10389 } IEM_MC_ENDIF();
10390 IEM_MC_END();
10391 return VINF_SUCCESS;
10392}
10393
10394
10395/** Opcode 0x78 - js Jb: jump short if sign (SF=1). */
10396FNIEMOP_DEF(iemOp_js_Jb)
10397{
10398 IEMOP_MNEMONIC(js_Jb, "js Jb");
10399 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10401 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10402
10403 IEM_MC_BEGIN(0, 0);
10404 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10405 IEM_MC_REL_JMP_S8(i8Imm);
10406 } IEM_MC_ELSE() {
10407 IEM_MC_ADVANCE_RIP();
10408 } IEM_MC_ENDIF();
10409 IEM_MC_END();
10410 return VINF_SUCCESS;
10411}
10412
10413
10414/** Opcode 0x79 - jns Jb: jump short if not sign (SF=0); inverted branch order. */
10415FNIEMOP_DEF(iemOp_jns_Jb)
10416{
10417 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
10418 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10420 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10421
10422 IEM_MC_BEGIN(0, 0);
10423 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
10424 IEM_MC_ADVANCE_RIP(); /* SF set -> fall through. */
10425 } IEM_MC_ELSE() {
10426 IEM_MC_REL_JMP_S8(i8Imm); /* SF clear -> take branch. */
10427 } IEM_MC_ENDIF();
10428 IEM_MC_END();
10429 return VINF_SUCCESS;
10430}
10431
10432
10433/** Opcode 0x7a - jp/jpe Jb: jump short if parity even (PF=1). */
10434FNIEMOP_DEF(iemOp_jp_Jb)
10435{
10436 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
10437 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10439 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10440
10441 IEM_MC_BEGIN(0, 0);
10442 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10443 IEM_MC_REL_JMP_S8(i8Imm);
10444 } IEM_MC_ELSE() {
10445 IEM_MC_ADVANCE_RIP();
10446 } IEM_MC_ENDIF();
10447 IEM_MC_END();
10448 return VINF_SUCCESS;
10449}
10450
10451
10452/** Opcode 0x7b - jnp/jpo Jb: jump short if parity odd (PF=0); inverted branch order. */
10453FNIEMOP_DEF(iemOp_jnp_Jb)
10454{
10455 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
10456 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10458 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10459
10460 IEM_MC_BEGIN(0, 0);
10461 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
10462 IEM_MC_ADVANCE_RIP(); /* PF set -> fall through. */
10463 } IEM_MC_ELSE() {
10464 IEM_MC_REL_JMP_S8(i8Imm); /* PF clear -> take branch. */
10465 } IEM_MC_ENDIF();
10466 IEM_MC_END();
10467 return VINF_SUCCESS;
10468}
10469
10470
10471/** Opcode 0x7c - jl/jnge Jb: jump short if less (SF != OF). */
10472FNIEMOP_DEF(iemOp_jl_Jb)
10473{
10474 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
10475 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10477 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10478
10479 IEM_MC_BEGIN(0, 0);
10480 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10481 IEM_MC_REL_JMP_S8(i8Imm);
10482 } IEM_MC_ELSE() {
10483 IEM_MC_ADVANCE_RIP();
10484 } IEM_MC_ENDIF();
10485 IEM_MC_END();
10486 return VINF_SUCCESS;
10487}
10488
10489
10490/** Opcode 0x7d - jnl/jge Jb: jump short if greater or equal (SF == OF); inverted branch order. */
10491FNIEMOP_DEF(iemOp_jnl_Jb)
10492{
10493 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
10494 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10496 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10497
10498 IEM_MC_BEGIN(0, 0);
10499 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
10500 IEM_MC_ADVANCE_RIP(); /* SF != OF -> fall through. */
10501 } IEM_MC_ELSE() {
10502 IEM_MC_REL_JMP_S8(i8Imm); /* SF == OF -> take branch. */
10503 } IEM_MC_ENDIF();
10504 IEM_MC_END();
10505 return VINF_SUCCESS;
10506}
10507
10508
10509/** Opcode 0x7e - jle/jng Jb: jump short if less or equal (ZF=1 or SF != OF). */
10510FNIEMOP_DEF(iemOp_jle_Jb)
10511{
10512 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
10513 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10515 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10516
10517 IEM_MC_BEGIN(0, 0);
10518 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10519 IEM_MC_REL_JMP_S8(i8Imm);
10520 } IEM_MC_ELSE() {
10521 IEM_MC_ADVANCE_RIP();
10522 } IEM_MC_ENDIF();
10523 IEM_MC_END();
10524 return VINF_SUCCESS;
10525}
10526
10527
10528/** Opcode 0x7f - jg/jnle Jb: jump short if greater (ZF=0 and SF == OF); inverted branch order. */
10529FNIEMOP_DEF(iemOp_jnle_Jb)
10530{
10531 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
10532 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10534 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10535
10536 IEM_MC_BEGIN(0, 0);
10537 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
10538 IEM_MC_ADVANCE_RIP(); /* ZF set or SF != OF -> fall through. */
10539 } IEM_MC_ELSE() {
10540 IEM_MC_REL_JMP_S8(i8Imm); /* ZF clear and SF == OF -> take branch. */
10541 } IEM_MC_ENDIF();
10542 IEM_MC_END();
10543 return VINF_SUCCESS;
10544}
10545
10546
/**
 * Opcode 0x80 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
 *
 * The ModR/M reg field selects the operation; the worker is looked up in
 * g_apIemImplGrp1 using that same value.  The switch below only picks the
 * mnemonic for logging/statistics.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib,  "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory destination. */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP is the only group member without a locked worker; it only
           reads the destination, so map it read-only. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* The trailing 1 tells the address calculation that one immediate
           byte still follows the ModR/M bytes -- TODO confirm against the
           IEM_MC_CALC_RM_EFF_ADDR definition. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        /* The immediate comes after any displacement, hence fetched here. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10616
10617
/**
 * Opcode 0x81 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 *
 * Word/dword/qword destination with a z-sized immediate: 16-bit immediate
 * for 16-bit operand size, 32-bit for 32-bit, and a sign-extended 32-bit
 * immediate for 64-bit (IEM_OPCODE_GET_NEXT_S32_SX_U64).  The ModR/M reg
 * field indexes the g_apIemImplGrp1 worker table.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz,  "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory destination. */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP has no locked worker (read-only destination). */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = immediate bytes still to come after ModR/M+displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = immediate bytes still to come after ModR/M+displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz in 64-bit mode: 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* NOTE(review): the assignment sits after the decoding-done check
                   here, unlike the 16/32-bit cases above; u64Imm is already
                   fetched at this point so the ordering looks immaterial. */
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
10805
10806
/**
 * Opcode 0x82 - alias of the 0x80 group 1 encoding.
 *
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT raises \#UD there); otherwise
 * identical to opcode 0x80 and forwarded to that decoder.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
10813
10814
/**
 * Opcode 0x83 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.
 *
 * Word/dword/qword destination with an 8-bit immediate that is
 * sign-extended to the operand size (note the (int8_t) casts below).
 * The ModR/M reg field indexes the g_apIemImplGrp1 worker table.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib,  "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK only valid with a memory destination. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP is the only member without a locked worker; read-only access.
           (pfnLockedU16 is a valid proxy for all three sizes here.) */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 1 = immediate byte still to come after ModR/M+displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend Ib */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend Ib */
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend Ib */
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
10997
10998
/**
 * Opcode 0x84 - test Eb,Gb.
 *
 * Byte TEST; AF is declared undefined for verification purposes.  Shares
 * the generic byte binary-operator decoder with the g_iemAImpl_test worker.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
11006
11007
/**
 * Opcode 0x85 - test Ev,Gv.
 *
 * Word/dword/qword TEST; AF is declared undefined for verification
 * purposes.  Shares the generic Ev,Gv decoder with the g_iemAImpl_test
 * worker.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
11015
11016
/**
 * Opcode 0x86 - xchg Eb,Gb.
 *
 * Register form: plain double fetch/store swap.  Memory form: maps the
 * memory operand read-write and calls the iemAImpl_xchg_u8 worker with a
 * reference to the register.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* NOTE(review): no lock-prefix rejection in this path, unlike the
           register form above -- presumably because XCHG with a memory
           operand accepts LOCK; confirm against the decoder helpers. */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem, 0);
        IEM_MC_ARG(uint8_t *,  pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11064
11065
/**
 * Opcode 0x87 - xchg Ev,Gv.
 *
 * Like opcode 0x86 but for 16/32/64-bit operands.  Register form swaps via
 * two temporaries; memory form maps the operand read-write and uses the
 * size-specific iemAImpl_xchg_uNN worker.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* 32-bit stores zero the high halves via IEM_MC_STORE_GREG_U32. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
                IEM_MC_ARG(uint16_t *,  pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
                IEM_MC_ARG(uint32_t *,  pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register was written through a reference, so clear the
                   upper half explicitly (32-bit write semantics). */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
                IEM_MC_ARG(uint64_t *,  pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11187
11188
/**
 * Opcode 0x88 - mov Eb,Gb.
 *
 * Store a byte register to a register or memory destination.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0: no immediate follows */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
11228
11229
/**
 * Opcode 0x89 - mov Ev,Gv.
 *
 * Store a 16/32/64-bit register to a register or memory destination,
 * sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11319
11320
/**
 * Opcode 0x8a - mov Gb,Eb.
 *
 * Load a byte register from a register or memory source.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0: no immediate follows */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11358
11359
/**
 * Opcode 0x8b - mov Gv,Ev.
 *
 * Load a 16/32/64-bit register from a register or memory source,
 * sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
11449
11450
/**
 * Opcode 0x63 - mode-dependent dispatcher.
 *
 * Outside 64-bit mode this is ARPL Ew,Gw.  In 64-bit mode it is MOVSXD:
 * with a 64-bit effective operand size it sign-extends a 32-bit source
 * (iemOp_movsxd_Gv_Ev); otherwise it degenerates to a plain MOV Gv,Ev.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
11460
11461
/** Opcode 0x8c - mov Ev,Sw (store a segment register to a GPR or memory). */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero extend the selector into the full 32-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* Zero extend the selector into the full 64-bit register. */
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11535
11536
11537
11538
/** Opcode 0x8d - lea Gv,M (load effective address; no memory access occurs). */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LEA only takes a memory operand; the register-direct form is #UD. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* The computed address is truncated to the 16-bit operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* The computed address is truncated to the 32-bit operand size. */
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /* 64-bit operand size: store the address as-is, no truncation. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
11585
11586
/** Opcode 0x8e - mov Sw,Ev (load a segment register from a GPR or memory). */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  Loading CS this way is
     * invalid, hence the extra check compared to the 0x8c store form.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * Segment loads have side effects (descriptor loading, exceptions), so
     * both paths defer to the iemCImpl_load_SReg C implementation.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11641
11642
/** Opcode 0x8f /0 - pop Ev (pop into a GPR or a memory location). */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  The last argument is the
       operand size, which the helper pretends has already been popped off
       rSP before evaluating any rSP-relative addressing. */
/** @todo testcase */
    PCPUMCTX     pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR      GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  The pop is done on a
       shadow copy of rSP (TmpRsp) so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Commit rSP and advance RIP only if both the pop and the store worked. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
11737
11738
11739/** Opcode 0x8f. */
11740FNIEMOP_DEF(iemOp_Grp1A)
11741{
11742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11743 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11744 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11745
11746 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11747 /** @todo XOP decoding. */
11748 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11749 return IEMOP_RAISE_INVALID_OPCODE();
11750}
11751
11752
11753/**
11754 * Common 'xchg reg,rAX' helper.
11755 */
11756FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11757{
11758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11759
11760 iReg |= pVCpu->iem.s.uRexB;
11761 switch (pVCpu->iem.s.enmEffOpSize)
11762 {
11763 case IEMMODE_16BIT:
11764 IEM_MC_BEGIN(0, 2);
11765 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11766 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11767 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11768 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11769 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11770 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11771 IEM_MC_ADVANCE_RIP();
11772 IEM_MC_END();
11773 return VINF_SUCCESS;
11774
11775 case IEMMODE_32BIT:
11776 IEM_MC_BEGIN(0, 2);
11777 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11778 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11779 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11780 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11781 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11782 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11783 IEM_MC_ADVANCE_RIP();
11784 IEM_MC_END();
11785 return VINF_SUCCESS;
11786
11787 case IEMMODE_64BIT:
11788 IEM_MC_BEGIN(0, 2);
11789 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11790 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11791 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11792 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11793 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11794 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11795 IEM_MC_ADVANCE_RIP();
11796 IEM_MC_END();
11797 return VINF_SUCCESS;
11798
11799 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11800 }
11801}
11802
11803
11804/** Opcode 0x90. */
11805FNIEMOP_DEF(iemOp_nop)
11806{
11807 /* R8/R8D and RAX/EAX can be exchanged. */
11808 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11809 {
11810 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11811 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11812 }
11813
11814 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
11815 IEMOP_MNEMONIC(pause, "pause");
11816 else
11817 IEMOP_MNEMONIC(nop, "nop");
11818 IEM_MC_BEGIN(0, 0);
11819 IEM_MC_ADVANCE_RIP();
11820 IEM_MC_END();
11821 return VINF_SUCCESS;
11822}
11823
11824
/** Opcode 0x91 - xchg rCX,rAX; forwards to the common xchg worker. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
11831
11832
/** Opcode 0x92 - xchg rDX,rAX; forwards to the common xchg worker. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
11839
11840
/** Opcode 0x93 - xchg rBX,rAX; forwards to the common xchg worker. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
11847
11848
11849/** Opcode 0x94. */
11850FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11851{
11852 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
11853 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11854}
11855
11856
/** Opcode 0x95 - xchg rBP,rAX; forwards to the common xchg worker. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
11863
11864
/** Opcode 0x96 - xchg rSI,rAX; forwards to the common xchg worker. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
11871
11872
/** Opcode 0x97 - xchg rDI,rAX; forwards to the common xchg worker. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
11879
11880
/** Opcode 0x98 - cbw/cwde/cdqe depending on the effective operand size.
 *
 * Sign-extends AL into AX / AX into EAX / EAX into RAX.  The extension is
 * implemented by testing the sign bit of the narrower value and then OR'ing
 * in or AND'ing off the upper bits of rAX accordingly. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            /* Bit 7 is the sign of AL. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            /* Bit 15 is the sign of AX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            /* Bit 31 is the sign of EAX. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11926
11927
/** Opcode 0x99 - cwd/cdq/cqo depending on the effective operand size.
 *
 * Sign-extends rAX into rDX: rDX is set to all ones when the sign bit of the
 * source is set, all zeros otherwise. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            /* DX = sign of AX (bit 15) replicated. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            /* EDX = sign of EAX (bit 31) replicated. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            /* RDX = sign of RAX (bit 63) replicated. */
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11973
11974
/** Opcode 0x9a - call Ap (far call with immediate seg:off pointer).
 *
 * Not valid in 64-bit mode.  The offset immediate is 16 or 32 bits wide
 * depending on the effective operand size; the selector always follows as a
 * 16-bit immediate.  The heavy lifting is deferred to iemCImpl_callf. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
11991
11992
/** Opcode 0x9b - wait (aka fwait).
 *
 * Checks for a pending device-not-available condition and pending FPU
 * exceptions, then does nothing else but advance RIP. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12006
12007
12008/** Opcode 0x9c. */
12009FNIEMOP_DEF(iemOp_pushf_Fv)
12010{
12011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12012 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12013 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
12014}
12015
12016
12017/** Opcode 0x9d. */
12018FNIEMOP_DEF(iemOp_popf_Fv)
12019{
12020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12021 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12022 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
12023}
12024
12025
/** Opcode 0x9e - sahf.
 *
 * Stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF).  In 64-bit
 * mode this is only valid when the CPU reports the LAHF/SAHF capability. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags SAHF may set... */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* ...clear the low byte of the current EFLAGS... */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    /* ...force the always-one reserved bit 1, and merge. */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12048
12049
/** Opcode 0x9f - lahf.
 *
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode this is only valid
 * when the CPU reports the LAHF/SAHF capability. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* Register index xSP with an 8-bit operand addresses AH. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12066
12067
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes.  Will return on failures.
 *
 * The width of the immediate offset follows the effective address size and
 * the value is always zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
12092
12093/** Opcode 0xa0. */
12094FNIEMOP_DEF(iemOp_mov_Al_Ob)
12095{
12096 /*
12097 * Get the offset and fend of lock prefixes.
12098 */
12099 RTGCPTR GCPtrMemOff;
12100 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12101
12102 /*
12103 * Fetch AL.
12104 */
12105 IEM_MC_BEGIN(0,1);
12106 IEM_MC_LOCAL(uint8_t, u8Tmp);
12107 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
12108 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12109 IEM_MC_ADVANCE_RIP();
12110 IEM_MC_END();
12111 return VINF_SUCCESS;
12112}
12113
12114
/** Opcode 0xa1 - mov rAX,Ov (load rAX from an absolute moffs address). */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX at the effective operand size.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12160
12161
12162/** Opcode 0xa2. */
12163FNIEMOP_DEF(iemOp_mov_Ob_AL)
12164{
12165 /*
12166 * Get the offset and fend of lock prefixes.
12167 */
12168 RTGCPTR GCPtrMemOff;
12169 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12170
12171 /*
12172 * Store AL.
12173 */
12174 IEM_MC_BEGIN(0,1);
12175 IEM_MC_LOCAL(uint8_t, u8Tmp);
12176 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
12177 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
12178 IEM_MC_ADVANCE_RIP();
12179 IEM_MC_END();
12180 return VINF_SUCCESS;
12181}
12182
12183
12184/** Opcode 0xa3. */
12185FNIEMOP_DEF(iemOp_mov_Ov_rAX)
12186{
12187 /*
12188 * Get the offset and fend of lock prefixes.
12189 */
12190 RTGCPTR GCPtrMemOff;
12191 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
12192
12193 /*
12194 * Store rAX.
12195 */
12196 switch (pVCpu->iem.s.enmEffOpSize)
12197 {
12198 case IEMMODE_16BIT:
12199 IEM_MC_BEGIN(0,1);
12200 IEM_MC_LOCAL(uint16_t, u16Tmp);
12201 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
12202 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
12203 IEM_MC_ADVANCE_RIP();
12204 IEM_MC_END();
12205 return VINF_SUCCESS;
12206
12207 case IEMMODE_32BIT:
12208 IEM_MC_BEGIN(0,1);
12209 IEM_MC_LOCAL(uint32_t, u32Tmp);
12210 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
12211 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
12212 IEM_MC_ADVANCE_RIP();
12213 IEM_MC_END();
12214 return VINF_SUCCESS;
12215
12216 case IEMMODE_64BIT:
12217 IEM_MC_BEGIN(0,1);
12218 IEM_MC_LOCAL(uint64_t, u64Tmp);
12219 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
12220 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
12221 IEM_MC_ADVANCE_RIP();
12222 IEM_MC_END();
12223 return VINF_SUCCESS;
12224
12225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12226 }
12227}
12228
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * One non-repeated MOVS iteration: load from [iEffSeg:rSI], store to
 * [ES:rDI], then step rSI and rDI by the operand size - down when EFLAGS.DF
 * is set, up otherwise. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
12247
/** Opcode 0xa4 - movsb Xb,Yb. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE are treated alike here.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12281
12282
/** Opcode 0xa5 - movsw/movsd/movsq Xv,Yv. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (REP and REPNE are treated alike here.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* No break needed: every inner case above returns. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* 64-bit operand size with 16-bit addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12365
12366#undef IEM_MOVS_CASE
12367
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * One non-repeated CMPS iteration: load from [iEffSeg:rSI] and [ES:rDI],
 * compare them via the cmp assembly worker (updating EFLAGS only), then step
 * rSI and rDI by the operand size - down when EFLAGS.DF is set, up
 * otherwise. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
12394
/** Opcode 0xa6 - cmpsb Xb,Yb.
 *
 * Unlike MOVS, the REPE (repeat-while-equal) and REPNE (repeat-while-not-
 * equal) prefixes select different C implementations here. */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
12440
12441
/**
 * Opcode 0xa7 - CMPSW/CMPSD/CMPSQ: compare word/dword/qword at [rSI] (iEffSeg)
 * with the element at ES:[rDI] and advance both index registers per EFLAGS.DF.
 *
 * REPE/REPNE forms defer to the C implementations, selected by the double
 * operand-size / address-size switch; the plain form expands IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): no break after this inner switch; harmless since
                   every inner case returns, but worth noting when editing. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): no break here either; all inner cases return. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
12560
12561#undef IEM_CMPS_CASE
12562
/**
 * Opcode 0xa8 - TEST AL,Ib.  AF is architecturally undefined after TEST,
 * hence the verification exclusion below.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
12570
12571
/**
 * Opcode 0xa9 - TEST rAX,Iz (operand-size dependent immediate).  AF is
 * architecturally undefined after TEST, hence the verification exclusion.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
12579
12580
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Emits the microcode for one STOS step: store rAX (ValBits wide) to
 * ES:[rDI] (rDI zero-extended from AddrBits) and step rDI by +/- ValBits/8
 * according to EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
12596
/**
 * Opcode 0xaa - STOSB: store AL to ES:[rDI], advance rDI per EFLAGS.DF.
 * REP forms (REPZ and REPNZ behave identically for STOS) defer to the C
 * implementations.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12630
12631
/**
 * Opcode 0xab - STOSW/STOSD/STOSQ: store rAX (operand-size wide) to
 * ES:[rDI] and advance rDI per EFLAGS.DF.  REP forms defer to the C
 * implementations selected by operand-size x address-size.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): no break after the inner switch; harmless as
                   every inner case returns. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12714
12715#undef IEM_STOS_CASE
12716
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Emits the microcode for one LODS step: load a ValBits value from
 * iEffSeg:[rSI] (rSI zero-extended from AddrBits) into rAX and step rSI by
 * +/- ValBits/8 according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
12732
/**
 * Opcode 0xac - LODSB: load AL from iEffSeg:[rSI], advance rSI per
 * EFLAGS.DF.  REP forms defer to the C implementations.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12766
12767
/**
 * Opcode 0xad - LODSW/LODSD/LODSQ: load rAX (operand-size wide) from
 * iEffSeg:[rSI] and advance rSI per EFLAGS.DF.  REP forms defer to the
 * C implementations selected by operand-size x address-size.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): no break after the inner switch; harmless as
                   every inner case returns. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
12850
12851#undef IEM_LODS_CASE
12852
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Emits the microcode for one SCAS step: compare rAX (ValBits wide) with
 * the value at ES:[rDI] via the cmp assembly helper (updates EFLAGS only;
 * rAX is passed by reference but cmp does not modify it), then step rDI by
 * +/- ValBits/8 according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
12874
12875/** Opcode 0xae. */
12876FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12877{
12878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12879
12880 /*
12881 * Use the C implementation if a repeat prefix is encountered.
12882 */
12883 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12884 {
12885 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12886 switch (pVCpu->iem.s.enmEffAddrMode)
12887 {
12888 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12889 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12890 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12892 }
12893 }
12894 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12895 {
12896 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
12897 switch (pVCpu->iem.s.enmEffAddrMode)
12898 {
12899 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12900 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12901 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12903 }
12904 }
12905 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12906
12907 /*
12908 * Sharing case implementation with stos[wdq] below.
12909 */
12910 switch (pVCpu->iem.s.enmEffAddrMode)
12911 {
12912 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12913 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12914 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12916 }
12917 return VINF_SUCCESS;
12918}
12919
12920
/**
 * Opcode 0xaf - SCASW/SCASD/SCASQ: compare rAX (operand-size wide) with the
 * element at ES:[rDI] and advance rDI per EFLAGS.DF.  REPE/REPNE forms defer
 * to the C implementations selected by operand-size x address-size.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): no break after the inner switch; harmless as
                   every inner case returns. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 16-bit addressing in 64-bit mode, but not 32-bit, right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): no break here either; all inner cases return. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing not encodable in 64-bit mode */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
13036
13037#undef IEM_SCAS_CASE
13038
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit register.
 *
 * @param iReg  The register index (possibly with REX.B already OR'ed in by
 *              the caller); high-byte vs. low-byte selection is presumably
 *              handled inside IEM_MC_STORE_GREG_U8 -- confirm there.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13055
13056
/** Opcode 0xb0 - mov AL,Ib (REX.B selects R8L). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
13063
13064
/** Opcode 0xb1 - mov CL,Ib (REX.B selects R9L).
 * NOTE(review): function name lacks the mov_ prefix used by 0xb0; left as-is
 * since the opcode table elsewhere references this symbol. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
13071
13072
/** Opcode 0xb2 - mov DL,Ib (REX.B selects R10L). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
13079
13080
/** Opcode 0xb3 - mov BL,Ib (REX.B selects R11L). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
13087
13088
/** Opcode 0xb4 - mov AH,Ib.
 * Register index 4 (xSP) encodes AH without a REX prefix and SPL/R12L with
 * one; presumably disambiguated inside IEM_MC_STORE_GREG_U8 -- confirm. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13095
13096
/** Opcode 0xb5 - mov CH,Ib (index 5: CH without REX, BPL/R13L with REX). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
13103
13104
/** Opcode 0xb6 - mov DH,Ib (index 6: DH without REX, SIL/R14L with REX). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
13111
13112
/** Opcode 0xb7 - mov BH,Ib (index 7: BH without REX, DIL/R15L with REX). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
13119
13120
13121/**
13122 * Common 'mov regX,immX' helper.
13123 */
13124FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
13125{
13126 switch (pVCpu->iem.s.enmEffOpSize)
13127 {
13128 case IEMMODE_16BIT:
13129 {
13130 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13132
13133 IEM_MC_BEGIN(0, 1);
13134 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
13135 IEM_MC_STORE_GREG_U16(iReg, u16Value);
13136 IEM_MC_ADVANCE_RIP();
13137 IEM_MC_END();
13138 break;
13139 }
13140
13141 case IEMMODE_32BIT:
13142 {
13143 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13145
13146 IEM_MC_BEGIN(0, 1);
13147 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
13148 IEM_MC_STORE_GREG_U32(iReg, u32Value);
13149 IEM_MC_ADVANCE_RIP();
13150 IEM_MC_END();
13151 break;
13152 }
13153 case IEMMODE_64BIT:
13154 {
13155 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
13156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13157
13158 IEM_MC_BEGIN(0, 1);
13159 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
13160 IEM_MC_STORE_GREG_U64(iReg, u64Value);
13161 IEM_MC_ADVANCE_RIP();
13162 IEM_MC_END();
13163 break;
13164 }
13165 }
13166
13167 return VINF_SUCCESS;
13168}
13169
13170
/** Opcode 0xb8 - mov rAX,Iv (REX.B selects r8). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
13177
13178
/** Opcode 0xb9 - mov rCX,Iv (REX.B selects r9). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
13185
13186
/** Opcode 0xba - mov rDX,Iv (REX.B selects r10). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
13193
13194
/** Opcode 0xbb - mov rBX,Iv (REX.B selects r11). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
13201
13202
/** Opcode 0xbc - mov rSP,Iv (REX.B selects r12). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
13209
13210
/** Opcode 0xbd - mov rBP,Iv (REX.B selects r13). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
13217
13218
/** Opcode 0xbe - mov rSI,Iv (REX.B selects r14). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
13225
13226
/** Opcode 0xbf - mov rDI,Iv (REX.B selects r15). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
13233
13234
/**
 * Opcode 0xc0 - Group 2, Eb,Ib: rotate/shift a byte register or memory
 * operand by an imm8 count.  The ModR/M reg field selects the operation;
 * /6 is undefined and raises #UD.  Requires a 186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are architecturally undefined for some counts of these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory - the imm8 count is fetched after the effective address,
           matching the instruction's byte order. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13294
13295
13296/** Opcode 0xc1. */
13297FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
13298{
13299 IEMOP_HLP_MIN_186();
13300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13301 PCIEMOPSHIFTSIZES pImpl;
13302 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13303 {
13304 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
13305 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
13306 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
13307 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
13308 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
13309 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
13310 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
13311 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13312 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
13313 }
13314 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13315
13316 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13317 {
13318 /* register */
13319 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13321 switch (pVCpu->iem.s.enmEffOpSize)
13322 {
13323 case IEMMODE_16BIT:
13324 IEM_MC_BEGIN(3, 0);
13325 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13326 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13327 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13328 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13329 IEM_MC_REF_EFLAGS(pEFlags);
13330 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13331 IEM_MC_ADVANCE_RIP();
13332 IEM_MC_END();
13333 return VINF_SUCCESS;
13334
13335 case IEMMODE_32BIT:
13336 IEM_MC_BEGIN(3, 0);
13337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13338 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13340 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13341 IEM_MC_REF_EFLAGS(pEFlags);
13342 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13343 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13344 IEM_MC_ADVANCE_RIP();
13345 IEM_MC_END();
13346 return VINF_SUCCESS;
13347
13348 case IEMMODE_64BIT:
13349 IEM_MC_BEGIN(3, 0);
13350 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13351 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
13352 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13353 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13354 IEM_MC_REF_EFLAGS(pEFlags);
13355 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13356 IEM_MC_ADVANCE_RIP();
13357 IEM_MC_END();
13358 return VINF_SUCCESS;
13359
13360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13361 }
13362 }
13363 else
13364 {
13365 /* memory */
13366 switch (pVCpu->iem.s.enmEffOpSize)
13367 {
13368 case IEMMODE_16BIT:
13369 IEM_MC_BEGIN(3, 2);
13370 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13371 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13372 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13374
13375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13376 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13377 IEM_MC_ASSIGN(cShiftArg, cShift);
13378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13379 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13380 IEM_MC_FETCH_EFLAGS(EFlags);
13381 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13382
13383 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13384 IEM_MC_COMMIT_EFLAGS(EFlags);
13385 IEM_MC_ADVANCE_RIP();
13386 IEM_MC_END();
13387 return VINF_SUCCESS;
13388
13389 case IEMMODE_32BIT:
13390 IEM_MC_BEGIN(3, 2);
13391 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13392 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13393 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13395
13396 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13397 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13398 IEM_MC_ASSIGN(cShiftArg, cShift);
13399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13400 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13401 IEM_MC_FETCH_EFLAGS(EFlags);
13402 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13403
13404 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13405 IEM_MC_COMMIT_EFLAGS(EFlags);
13406 IEM_MC_ADVANCE_RIP();
13407 IEM_MC_END();
13408 return VINF_SUCCESS;
13409
13410 case IEMMODE_64BIT:
13411 IEM_MC_BEGIN(3, 2);
13412 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13413 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13414 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13416
13417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13418 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
13419 IEM_MC_ASSIGN(cShiftArg, cShift);
13420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13421 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13422 IEM_MC_FETCH_EFLAGS(EFlags);
13423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13424
13425 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13426 IEM_MC_COMMIT_EFLAGS(EFlags);
13427 IEM_MC_ADVANCE_RIP();
13428 IEM_MC_END();
13429 return VINF_SUCCESS;
13430
13431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13432 }
13433 }
13434}
13435
13436
/**
 * Opcode 0xc2 - retn Iw.
 *
 * Near return, additionally popping Iw bytes of arguments off the stack.
 * The actual work is deferred to iemCImpl_retn.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* bytes to pop after the return address */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13446
13447
/**
 * Opcode 0xc3 - retn.
 *
 * Plain near return; same as 0xc2 but with zero argument bytes to pop.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
13456
13457
/**
 * Opcode 0xc4 - les Gv,Mp / two-byte VEX prefix.
 *
 * Decodes as LES only outside 64-bit mode and with a memory operand;
 * in 64-bit mode, or with MOD=3, the byte is the two-byte VEX escape
 * (currently not implemented, so it raises \#UD).
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
13478
13479
/**
 * Opcode 0xc5 - lds Gv,Mp / three-byte VEX prefix.
 *
 * Decodes as LDS only outside 64-bit mode and with a memory operand;
 * otherwise the byte is the three-byte VEX escape.  The VEX payload bytes
 * are consumed here, but actual VEX decoding is not implemented yet, so
 * the VEX path raises \#UD.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* VEX requires protected mode outside 64-bit. */
    }

    IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
    /** @todo Test when exactly the VEX conformance checks kick in during
     *        instruction decoding and fetching (using \#PF). */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
13517
13518
/**
 * Opcode 0xc6 - Group 11, mov Eb,Ib.
 *
 * Only /0 (mov) is defined in this group; all other reg fields raise \#UD.
 * Stores an immediate byte into a byte register or memory location.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to come after the ModR/M bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13551
13552
/**
 * Opcode 0xc7 - Group 11, mov Ev,Iz.
 *
 * Only /0 (mov) is defined in this group; all other reg fields raise \#UD.
 * Stores a word/dword immediate (sign-extended to 64 bits for 64-bit
 * operand size) into a register or memory location.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit operand size still takes a 32-bit immediate, sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow the ModR/M bytes */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow the ModR/M bytes */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still a 4 byte immediate with 64-bit operand size */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13638
13639
13640
13641
/**
 * Opcode 0xc8 - enter Iw,Ib.
 *
 * Stack frame setup: Iw is the frame size, Ib the nesting level.
 * Requires a 186 or later; deferred to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
13653
13654
/**
 * Opcode 0xc9 - leave.
 *
 * Tears down the stack frame set up by ENTER.  Requires a 186 or later;
 * deferred to iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
13664
13665
/**
 * Opcode 0xca - retf Iw.
 *
 * Far return, additionally popping Iw bytes of arguments off the stack.
 * Deferred to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* bytes to pop after CS:IP */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
13675
13676
/**
 * Opcode 0xcb - retf.
 *
 * Plain far return; same as 0xca but with zero argument bytes to pop.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
13685
13686
13687/** Opcode 0xcc. */
13688FNIEMOP_DEF(iemOp_int_3)
13689{
13690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13691 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
13692}
13693
13694
13695/** Opcode 0xcd. */
13696FNIEMOP_DEF(iemOp_int_Ib)
13697{
13698 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13700 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13701}
13702
13703
/**
 * Opcode 0xce - into.
 *
 * Raises \#OF if EFLAGS.OF is set (the check is done by iemCImpl_int).
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13717
13718
/**
 * Opcode 0xcf - iret.
 *
 * Interrupt return; all the mode-dependent heavy lifting lives in
 * iemCImpl_iret.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
13726
13727
/**
 * Opcode 0xd0 - Group 2, shift/rotate Eb by 1.
 *
 * The reg field of the ModR/M byte selects the operation (rol/ror/rcl/rcr/
 * shl/shr/sar); /6 is undefined and raises \#UD.  The shift count is the
 * constant 1.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by the hardware for some of these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13783
13784
13785
/**
 * Opcode 0xd1 - Group 2, shift/rotate Ev by 1.
 *
 * Same operation selection as 0xd0 (reg field picks rol/ror/rcl/rcr/shl/
 * shr/sar, /6 raises \#UD) but on a word/dword/qword operand, with the
 * shift count fixed at 1.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are left undefined by the hardware for some of these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13917
13918
/**
 * Opcode 0xd2 - Group 2, shift/rotate Eb by CL.
 *
 * Same operation selection as 0xd0 (/6 raises \#UD) but the shift count is
 * taken from the CL register.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are left undefined by the hardware for some of these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13976
13977
/**
 * Opcode 0xd3 - Group 2, shift/rotate Ev by CL.
 *
 * Same operation selection as 0xd1 (/6 raises \#UD) but the shift count is
 * taken from the CL register; operand is word/dword/qword sized.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by the hardware for some of these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14115
/**
 * Opcode 0xd4 - aam Ib.
 *
 * ASCII adjust after multiply; Ib is the divisor (normally 10).  A zero
 * immediate raises \#DE.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm) /* division by zero -> #DE, checked at decode time */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
14127
14128
/**
 * Opcode 0xd5 - aad Ib.
 *
 * ASCII adjust before division; Ib is the multiplier (normally 10).
 * Invalid in 64-bit mode.  Unlike AAM, a zero immediate is fine here.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
14138
14139
14140/** Opcode 0xd6. */
14141FNIEMOP_DEF(iemOp_salc)
14142{
14143 IEMOP_MNEMONIC(salc, "salc");
14144 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
14145 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
14146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14147 IEMOP_HLP_NO_64BIT();
14148
14149 IEM_MC_BEGIN(0, 0);
14150 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
14151 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
14152 } IEM_MC_ELSE() {
14153 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
14154 } IEM_MC_ENDIF();
14155 IEM_MC_ADVANCE_RIP();
14156 IEM_MC_END();
14157 return VINF_SUCCESS;
14158}
14159
14160
14161/** Opcode 0xd7. */
14162FNIEMOP_DEF(iemOp_xlat)
14163{
14164 IEMOP_MNEMONIC(xlat, "xlat");
14165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14166 switch (pVCpu->iem.s.enmEffAddrMode)
14167 {
14168 case IEMMODE_16BIT:
14169 IEM_MC_BEGIN(2, 0);
14170 IEM_MC_LOCAL(uint8_t, u8Tmp);
14171 IEM_MC_LOCAL(uint16_t, u16Addr);
14172 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
14173 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
14174 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
14175 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14176 IEM_MC_ADVANCE_RIP();
14177 IEM_MC_END();
14178 return VINF_SUCCESS;
14179
14180 case IEMMODE_32BIT:
14181 IEM_MC_BEGIN(2, 0);
14182 IEM_MC_LOCAL(uint8_t, u8Tmp);
14183 IEM_MC_LOCAL(uint32_t, u32Addr);
14184 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
14185 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
14186 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
14187 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14188 IEM_MC_ADVANCE_RIP();
14189 IEM_MC_END();
14190 return VINF_SUCCESS;
14191
14192 case IEMMODE_64BIT:
14193 IEM_MC_BEGIN(2, 0);
14194 IEM_MC_LOCAL(uint8_t, u8Tmp);
14195 IEM_MC_LOCAL(uint64_t, u64Addr);
14196 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
14197 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
14198 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
14199 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
14200 IEM_MC_ADVANCE_RIP();
14201 IEM_MC_END();
14202 return VINF_SUCCESS;
14203
14204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14205 }
14206}
14207
14208
14209/**
14210 * Common worker for FPU instructions working on ST0 and STn, and storing the
14211 * result in ST0.
14212 *
14213 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14214 */
14215FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14216{
14217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14218
14219 IEM_MC_BEGIN(3, 1);
14220 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14221 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14222 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14223 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14224
14225 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14226 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14227 IEM_MC_PREPARE_FPU_USAGE();
14228 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14229 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14230 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14231 IEM_MC_ELSE()
14232 IEM_MC_FPU_STACK_UNDERFLOW(0);
14233 IEM_MC_ENDIF();
14234 IEM_MC_ADVANCE_RIP();
14235
14236 IEM_MC_END();
14237 return VINF_SUCCESS;
14238}
14239
14240
14241/**
14242 * Common worker for FPU instructions working on ST0 and STn, and only affecting
14243 * flags.
14244 *
14245 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14246 */
14247FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14248{
14249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14250
14251 IEM_MC_BEGIN(3, 1);
14252 IEM_MC_LOCAL(uint16_t, u16Fsw);
14253 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14254 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14255 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14256
14257 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14258 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14259 IEM_MC_PREPARE_FPU_USAGE();
14260 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14261 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14262 IEM_MC_UPDATE_FSW(u16Fsw);
14263 IEM_MC_ELSE()
14264 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14265 IEM_MC_ENDIF();
14266 IEM_MC_ADVANCE_RIP();
14267
14268 IEM_MC_END();
14269 return VINF_SUCCESS;
14270}
14271
14272
14273/**
14274 * Common worker for FPU instructions working on ST0 and STn, only affecting
14275 * flags, and popping when done.
14276 *
14277 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14278 */
14279FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14280{
14281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14282
14283 IEM_MC_BEGIN(3, 1);
14284 IEM_MC_LOCAL(uint16_t, u16Fsw);
14285 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14286 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14287 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14288
14289 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14290 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14291 IEM_MC_PREPARE_FPU_USAGE();
14292 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14293 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14294 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14295 IEM_MC_ELSE()
14296 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
14297 IEM_MC_ENDIF();
14298 IEM_MC_ADVANCE_RIP();
14299
14300 IEM_MC_END();
14301 return VINF_SUCCESS;
14302}
14303
14304
/** Opcode 0xd8 11/0 - fadd st0,stN: ST0 += STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1 - fmul st0,stN: ST0 *= STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2 - fcom st0,stN: compare ST0 with STn, setting FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3 - fcomp st0,stN: same as fcom but pops the stack (same
 *  assembly worker, pop variant of the helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4 - fsub st0,stN: ST0 -= STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5 - fsubr st0,stN: ST0 = STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6 - fdiv st0,stN: ST0 /= STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7 - fdivr st0,stN: ST0 = STn / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
14367
14368
14369/**
14370 * Common worker for FPU instructions working on ST0 and an m32r, and storing
14371 * the result in ST0.
14372 *
14373 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14374 */
14375FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
14376{
14377 IEM_MC_BEGIN(3, 3);
14378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14379 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14380 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
14381 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14382 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14383 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
14384
14385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14387
14388 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14389 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14390 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14391
14392 IEM_MC_PREPARE_FPU_USAGE();
14393 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14394 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
14395 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14396 IEM_MC_ELSE()
14397 IEM_MC_FPU_STACK_UNDERFLOW(0);
14398 IEM_MC_ENDIF();
14399 IEM_MC_ADVANCE_RIP();
14400
14401 IEM_MC_END();
14402 return VINF_SUCCESS;
14403}
14404
14405
/** Opcode 0xd8 !11/0 - fadd st0,m32r: ST0 += [mem32real]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1 - fmul st0,m32r: ST0 *= [mem32real]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
14420
14421
/** Opcode 0xd8 !11/2 - fcom st0,m32r: compare ST0 with [mem32real], FSW only.
 *  Open-coded (not using iemOpHlpFpu_st0_m32r) because only FSW is written
 *  and the memory operand address is recorded in FDP on both paths. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14454
14455
/** Opcode 0xd8 !11/3 - fcomp st0,m32r: like fcom m32r but pops the stack on
 *  both the compare and the underflow path. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14488
14489
/** Opcode 0xd8 !11/4 - fsub st0,m32r: ST0 -= [mem32real]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5 - fsubr st0,m32r: ST0 = [mem32real] - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6 - fdiv st0,m32r: ST0 /= [mem32real]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7 - fdivr st0,m32r: ST0 = [mem32real] / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
14520
14521
/** Opcode 0xd8 - first x87 escape byte.
 *  Dispatches on the ModR/M reg field; mod==3 selects the register (STn)
 *  forms, anything else the m32r memory forms.  The FPU opcode word (FOP)
 *  is recorded from the ModR/M byte and the low 3 opcode bits before
 *  dispatching. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14559
14560
/** Opcode 0xd9 /0 mem32real - fld m32r: push [mem32real] converted to R80.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST7-to-be, i.e. register 7 relative to the current top;
       if that slot is occupied we get a stack push overflow instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14593
14594
/** Opcode 0xd9 !11/2 mem32real - fst m32r: store ST0 to [mem32real] without
 *  popping.  The destination is mapped for write up-front; on stack underflow
 *  with IM masked a negative QNaN is written instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on FSW: an unmasked exception must not write. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14629
14630
/** Opcode 0xd9 !11/3 - fstp m32r: identical to fst m32r except the stack is
 *  popped after the store / underflow handling. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14665
14666
/** Opcode 0xd9 !11/4 - fldenv m14/28byte: load the FPU environment from
 *  memory; delegated to the C implementation which handles both the 14-byte
 *  (16-bit) and 28-byte (32/64-bit) layouts based on the operand size. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14684
14685
14686/** Opcode 0xd9 !11/5 */
14687FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
14688{
14689 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
14690 IEM_MC_BEGIN(1, 1);
14691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14692 IEM_MC_ARG(uint16_t, u16Fsw, 0);
14693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14695 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14696 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14697 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14698 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
14699 IEM_MC_END();
14700 return VINF_SUCCESS;
14701}
14702
14703
/** Opcode 0xd9 !11/6 - fnstenv m14/m28byte: store the FPU environment to
 *  memory via the C implementation (operand size selects the 14/28-byte
 *  layout).
 *  NOTE(review): the mnemonic id/string say "fstenv" although this decodes
 *  the no-wait form fnstenv (FSTENV is the 0x9b-prefixed sequence) — looks
 *  intentional for stats naming, but worth confirming. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14721
14722
14723/** Opcode 0xd9 !11/7 */
14724FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14725{
14726 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14727 IEM_MC_BEGIN(2, 0);
14728 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14729 IEM_MC_LOCAL(uint16_t, u16Fcw);
14730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14732 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14733 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14734 IEM_MC_FETCH_FCW(u16Fcw);
14735 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14736 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
14737 IEM_MC_END();
14738 return VINF_SUCCESS;
14739}
14740
14741
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. - fnop: no operation, but still
 *  raises #NM/#MF as appropriate and updates FPUIP/FOP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
14759
14760
/** Opcode 0xd9 11/0 stN - fld stN: push a copy of STn onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Read STn before the push; empty source gives a push-underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14788
14789
/** Opcode 0xd9 11/3 stN - fxch stN: exchange ST0 and STn; underflow cases are
 *  handed to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: old STn (with C1 set) goes to ST0, old ST0 goes to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
14820
14821
/** Opcode 0xd9 11/4, 0xdd 11/2. - fstp st0,stN: copy ST0 to STn and pop.
 *  Has a fast path for the stN==st0 case, which is frequently used as an
 *  unofficial 'ffreep st0'. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (FSW cleared). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: read ST0, store it into STn, then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14868
14869
14870/**
14871 * Common worker for FPU instructions working on ST0 and replaces it with the
14872 * result, i.e. unary operators.
14873 *
14874 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14875 */
14876FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14877{
14878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14879
14880 IEM_MC_BEGIN(2, 1);
14881 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14882 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14883 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14884
14885 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14886 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14887 IEM_MC_PREPARE_FPU_USAGE();
14888 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14889 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14890 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14891 IEM_MC_ELSE()
14892 IEM_MC_FPU_STACK_UNDERFLOW(0);
14893 IEM_MC_ENDIF();
14894 IEM_MC_ADVANCE_RIP();
14895
14896 IEM_MC_END();
14897 return VINF_SUCCESS;
14898}
14899
14900
/** Opcode 0xd9 0xe0 - fchs st0: negate ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1 - fabs st0: ST0 = |ST0|. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
14915
14916
14917/**
14918 * Common worker for FPU instructions working on ST0 and only returns FSW.
14919 *
14920 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14921 */
14922FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14923{
14924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14925
14926 IEM_MC_BEGIN(2, 1);
14927 IEM_MC_LOCAL(uint16_t, u16Fsw);
14928 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14929 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14930
14931 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14932 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14933 IEM_MC_PREPARE_FPU_USAGE();
14934 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14935 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14936 IEM_MC_UPDATE_FSW(u16Fsw);
14937 IEM_MC_ELSE()
14938 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14939 IEM_MC_ENDIF();
14940 IEM_MC_ADVANCE_RIP();
14941
14942 IEM_MC_END();
14943 return VINF_SUCCESS;
14944}
14945
14946
/** Opcode 0xd9 0xe4 - ftst st0: compare ST0 with 0.0, flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5 - fxam st0: classify ST0 into C0/C2/C3, flags only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
14961
14962
14963/**
14964 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14965 *
14966 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14967 */
14968FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14969{
14970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14971
14972 IEM_MC_BEGIN(1, 1);
14973 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14974 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14975
14976 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14977 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14978 IEM_MC_PREPARE_FPU_USAGE();
14979 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14980 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14981 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14982 IEM_MC_ELSE()
14983 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14984 IEM_MC_ENDIF();
14985 IEM_MC_ADVANCE_RIP();
14986
14987 IEM_MC_END();
14988 return VINF_SUCCESS;
14989}
14990
14991
/** Opcode 0xd9 0xe8 - fld1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9 - fldl2t: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea - fldl2e: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb - fldpi: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec - fldlg2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed - fldln2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee - fldz: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
15044
15045
/** Opcode 0xd9 0xf0 - f2xm1 st0: ST0 = 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
15052
15053
15054/**
15055 * Common worker for FPU instructions working on STn and ST0, storing the result
15056 * in STn, and popping the stack unless IE, DE or ZE was raised.
15057 *
15058 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15059 */
15060FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15061{
15062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15063
15064 IEM_MC_BEGIN(3, 1);
15065 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15066 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15067 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15068 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15069
15070 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15071 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15072
15073 IEM_MC_PREPARE_FPU_USAGE();
15074 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15075 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15076 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
15077 IEM_MC_ELSE()
15078 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
15079 IEM_MC_ENDIF();
15080 IEM_MC_ADVANCE_RIP();
15081
15082 IEM_MC_END();
15083 return VINF_SUCCESS;
15084}
15085
15086
/** Opcode 0xd9 0xf1 - fyl2x: ST1 = ST1 * log2(ST0), then pop.
 *  NOTE(review): the mnemonic stats id says fyl2x_st0 while the string says
 *  "fyl2x st1,st0" (which matches the helper call with register 1) — the id
 *  looks like a copy/paste slip, harmless for behavior. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
15093
15094
15095/**
15096 * Common worker for FPU instructions working on ST0 and having two outputs, one
15097 * replacing ST0 and one pushed onto the stack.
15098 *
15099 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15100 */
15101FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
15102{
15103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15104
15105 IEM_MC_BEGIN(2, 1);
15106 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
15107 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
15108 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
15109
15110 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15111 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15112 IEM_MC_PREPARE_FPU_USAGE();
15113 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15114 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
15115 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
15116 IEM_MC_ELSE()
15117 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
15118 IEM_MC_ENDIF();
15119 IEM_MC_ADVANCE_RIP();
15120
15121 IEM_MC_END();
15122 return VINF_SUCCESS;
15123}
15124
15125
/** Opcode 0xd9 0xf2 - fptan st0: ST0 = tan(ST0), then push the second result
 *  (both produced by the assembly worker). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3 - fpatan st1,st0: ST1 = atan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4 - fxtract st0: split ST0 into exponent (replacing ST0)
 *  and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5 - fprem1 st0,st1: IEEE partial remainder of ST0 by ST1,
 *  stored in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
15156
15157
/** Opcode 0xd9 0xf6 - fdecstp: decrement the FPU stack top pointer (TOP),
 *  without checking register validity. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15180
15181
/** Opcode 0xd9 0xf7 - fincstp: increment the FPU stack top pointer (TOP),
 *  without checking register validity. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15204
15205
/** Opcode 0xd9 0xf8 - FPREM.
 * Operates on ST(0) using ST(1), storing the result in ST(0) via the common
 * st0/stN helper. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
15212
15213
/** Opcode 0xd9 0xf9 - FYL2XP1.
 * Result goes to ST(1) and the stack is popped, hence the stN_st0_pop
 * helper (note the st1,st0 operand order in the mnemonic). */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
15220
15221
/** Opcode 0xd9 0xfa - FSQRT.
 * Unary operation on ST(0) via the common single-operand helper. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
15228
15229
/** Opcode 0xd9 0xfb - FSINCOS.
 * Produces two results: replaces ST(0) and pushes a second value, so it
 * uses the replace-and-push helper. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
15236
15237
/** Opcode 0xd9 0xfc - FRNDINT.
 * Unary operation on ST(0) via the common single-operand helper. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
15244
15245
/** Opcode 0xd9 0xfd - FSCALE.
 * Operates on ST(0) using ST(1), storing the result in ST(0) via the common
 * st0/stN helper. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
15252
15253
/** Opcode 0xd9 0xfe - FSIN.
 * Unary operation on ST(0) via the common single-operand helper. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
15260
15261
/** Opcode 0xd9 0xff - FCOS.
 * Unary operation on ST(0) via the common single-operand helper. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
15268
15269
/** Used by iemOp_EscF1.
 * Dispatch table for the register form of the 0xd9 escape byte covering
 * ModRM bytes 0xe0 thru 0xff (i.e. reg fields 4 thru 7); indexed by
 * bRm - 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
15306
15307
/** Opcode 0xd9 - escape byte dispatcher.
 * Register forms (mod == 3) are routed via the reg field and, for reg 4-7,
 * the g_apfnEscF1_E0toFF table; memory forms decode by the reg field only. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 opcode bits + ModRM) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15350
15351
/** Opcode 0xda 11/0 - FCMOVB.
 * Copies ST(i) to ST(0) when CF is set; underflows the FPU stack when either
 * register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty for the move to be considered. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15378
15379
/** Opcode 0xda 11/1 - FCMOVE.
 * Copies ST(i) to ST(0) when ZF is set; underflows the FPU stack when either
 * register is empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty for the move to be considered. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15406
15407
/** Opcode 0xda 11/2 - FCMOVBE.
 * Copies ST(i) to ST(0) when CF or ZF is set; underflows the FPU stack when
 * either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty for the move to be considered. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15434
15435
/** Opcode 0xda 11/3 - FCMOVU.
 * Copies ST(i) to ST(0) when PF is set (unordered); underflows the FPU stack
 * when either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty for the move to be considered. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15462
15463
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * The assembly worker only produces an FSW value (no data result); on stack
 * underflow (either register empty) the underflow-and-double-pop path is
 * taken instead.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15495
15496
/** Opcode 0xda 0xe9 - FUCOMPP.
 * Unordered compare of ST(0) with ST(1), popping both; flags-only, hence the
 * no-store double-pop helper. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
15503
15504
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * The effective address is calculated before IEMOP_HLP_DONE_DECODING so any
 * remaining opcode bytes are consumed first; the memory operand is fetched
 * only after the availability/exception checks.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15540
15541
/** Opcode 0xda !11/0 - FIADD m32i.
 * ST(0) += m32i via the common st0/m32i helper. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
15548
15549
/** Opcode 0xda !11/1 - FIMUL m32i.
 * ST(0) *= m32i via the common st0/m32i helper. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
15556
15557
/** Opcode 0xda !11/2 - FICOM m32i.
 * Compares ST(0) with the memory operand; flags-only (FSW update), no data
 * result is stored and nothing is popped. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15590
15591
/** Opcode 0xda !11/3 - FICOMP m32i.
 * Same as FICOM m32i (shares iemAImpl_ficom_r80_by_i32) but pops the stack
 * afterwards - note the *_THEN_POP variants on both paths. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15624
15625
/** Opcode 0xda !11/4 - FISUB m32i.
 * ST(0) -= m32i via the common st0/m32i helper. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
15632
15633
/** Opcode 0xda !11/5 - FISUBR m32i.
 * Reversed subtraction variant via the common st0/m32i helper. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
15640
15641
/** Opcode 0xda !11/6 - FIDIV m32i.
 * ST(0) /= m32i via the common st0/m32i helper. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
15648
15649
/** Opcode 0xda !11/7 - FIDIVR m32i.
 * Reversed division variant via the common st0/m32i helper. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
15656
15657
/** Opcode 0xda - escape byte dispatcher.
 * Register forms handle the FCMOVcc group plus FUCOMPP (0xe9); memory forms
 * are the m32i integer arithmetic/compare instructions. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 opcode bits + ModRM) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15697
15698
/** Opcode 0xdb !11/0 - FILD m32i.
 * Loads and converts a 32-bit signed integer, pushing it onto the FPU stack.
 * The push requires ST(7) - the next stack slot - to be empty, otherwise the
 * push-overflow path is taken. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15730
15731
/** Opcode 0xdb !11/1 - FISTTP m32i.
 * Stores ST(0) as a 32-bit integer (truncating worker iemAImpl_fistt_*) and
 * pops.  The destination is mapped for write before the store; on stack
 * underflow, INT32_MIN (integer indefinite) is written if FCW.IM masks the
 * invalid-operation exception. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15766
15767
/** Opcode 0xdb !11/2 - FIST m32i.
 * Stores ST(0) as a 32-bit integer (rounding worker iemAImpl_fist_*) without
 * popping.  Underflow handling matches iemOp_fisttp_m32i except no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15802
15803
/** Opcode 0xdb !11/3 - FISTP m32i.
 * Same as FIST m32i (shares iemAImpl_fist_r80_to_i32) but pops the stack
 * afterwards - note the *_THEN_POP variants on both paths. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15838
15839
/** Opcode 0xdb !11/5 - FLD m80r.
 * Loads an 80-bit real from memory, pushing it onto the FPU stack; the push
 * requires ST(7) to be empty, otherwise push-overflow is raised. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15871
15872
/** Opcode 0xdb !11/7 - FSTP m80r.
 * Stores ST(0) to memory as an 80-bit real and pops.  On stack underflow a
 * negative QNaN is written if FCW.IM masks the invalid-operation exception. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15907
15908
/** Opcode 0xdb 11/0 - FCMOVNB.
 * Copies ST(i) to ST(0) when CF is clear; underflows the FPU stack when
 * either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty for the move to be considered. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15935
15936
/** Opcode 0xdb 11/1 - FCMOVNE.
 * Copies ST(i) to ST(0) when ZF is clear; underflows the FPU stack when
 * either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty for the move to be considered. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15963
15964
/** Opcode 0xdb 11/2 - FCMOVNBE.
 * Copies ST(i) to ST(0) when both CF and ZF are clear; underflows the FPU
 * stack when either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty for the move to be considered. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15991
15992
15993/** Opcode 0xdb 11/3. */
15994FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
15995{
15996 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
15997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15998
15999 IEM_MC_BEGIN(0, 1);
16000 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
16001
16002 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16003 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16004
16005 IEM_MC_PREPARE_FPU_USAGE();
16006 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
16007 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
16008 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
16009 IEM_MC_ENDIF();
16010 IEM_MC_UPDATE_FPU_OPCODE_IP();
16011 IEM_MC_ELSE()
16012 IEM_MC_FPU_STACK_UNDERFLOW(0);
16013 IEM_MC_ENDIF();
16014 IEM_MC_ADVANCE_RIP();
16015
16016 IEM_MC_END();
16017 return VINF_SUCCESS;
16018}
16019
16020
/** Opcode 0xdb 0xe0 - FNENI.
 * 8087 interrupt-enable instruction; treated as a NOP here (only the FPU
 * availability check and RIP advance are performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16032
16033
/** Opcode 0xdb 0xe1 - FNDISI.
 * 8087 interrupt-disable instruction; treated as a NOP here (only the FPU
 * availability check and RIP advance are performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16045
16046
/** Opcode 0xdb 0xe2 - FNCLEX.
 * Clears the FPU exception bits in the FSW.  Note: no pending-exception
 * check (IEM_MC_MAYBE_RAISE_FPU_XCPT) - the "no-wait" form must work even
 * with exceptions pending. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16061
16062
/** Opcode 0xdb 0xe3 - FNINIT.
 * Defers to the C implementation of FINIT with fCheckXcpts=false (the
 * no-wait form does not check for pending exceptions first). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
16070
16071
/** Opcode 0xdb 0xe4 - FNSETPM.
 * 80287 instruction; treated as a NOP here (only the FPU availability
 * check and RIP advance are performed). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16083
16084
/** Opcode 0xdb 0xe5 - FRSTPM.
 * 80287XL instruction; the NOP variant is disabled because newer CPUs raise
 * \#UD for this encoding, which is what we emulate. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
16100
16101
/** Opcode 0xdb 11/5 - FUCOMI.
 * EFLAGS-setting unordered compare; defers to the shared fcomi/fucomi C
 * implementation without popping (fPop=false). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
16108
16109
/** Opcode 0xdb 11/6 - FCOMI.
 * EFLAGS-setting ordered compare; defers to the shared fcomi/fucomi C
 * implementation without popping (fPop=false). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
16116
16117
/** Opcode 0xdb - escape byte dispatcher.
 * Register forms cover the FCMOVNcc group, the reg==4 control instructions
 * (fneni/fndisi/fnclex/fninit/fnsetpm/frstpm) and FUCOMI/FCOMI; memory forms
 * are the m32i load/stores plus the m80r load/store-pop. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 opcode bits + ModRM) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg==4 register forms are individual control instructions. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16167
16168
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * STn is the ModRM rm field; on stack underflow (either register empty) the
 * underflow is reported against STn, matching where the result would go.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16200
16201
/** Opcode 0xdc 11/0. FADD ST(i),ST(0) - add ST(0) to ST(i), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
16208
16209
/** Opcode 0xdc 11/1. FMUL ST(i),ST(0) - multiply ST(i) by ST(0), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
16216
16217
/** Opcode 0xdc 11/4. FSUBR ST(i),ST(0) - reversed subtract, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
16224
16225
/** Opcode 0xdc 11/5. FSUB ST(i),ST(0) - subtract ST(0) from ST(i), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
16232
16233
/** Opcode 0xdc 11/6. FDIVR ST(i),ST(0) - reversed divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
16240
16241
/** Opcode 0xdc 11/7. FDIV ST(i),ST(0) - divide ST(i) by ST(0), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
16248
16249
16250/**
16251 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
16252 * memory operand, and storing the result in ST0.
16253 *
16254 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16255 */
16256FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
16257{
16258 IEM_MC_BEGIN(3, 3);
16259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16260 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16261 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
16262 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16263 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
16264 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
16265
16266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16268 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16269 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16270
16271 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16272 IEM_MC_PREPARE_FPU_USAGE();
16273 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
16274 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
16275 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16276 IEM_MC_ELSE()
16277 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16278 IEM_MC_ENDIF();
16279 IEM_MC_ADVANCE_RIP();
16280
16281 IEM_MC_END();
16282 return VINF_SUCCESS;
16283}
16284
16285
/** Opcode 0xdc !11/0. FADD m64real - add a 64-bit float to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
16292
16293
/** Opcode 0xdc !11/1. FMUL m64real - multiply ST(0) by a 64-bit float. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
16300
16301
/** Opcode 0xdc !11/2. FCOM m64real - compare ST(0) with a 64-bit float,
 *  setting C0..C3 in the FPU status word. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE() /* ST(0) empty: record stack underflow (no register result). */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16334
16335
/** Opcode 0xdc !11/3. FCOMP m64real - compare ST(0) with a 64-bit float,
 *  set C0..C3, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE() /* ST(0) empty: record stack underflow, still popping. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16368
16369
/** Opcode 0xdc !11/4. FSUB m64real - subtract a 64-bit float from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
16376
16377
/** Opcode 0xdc !11/5. FSUBR m64real - reversed subtract: m64 - ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
16384
16385
/** Opcode 0xdc !11/6. FDIV m64real - divide ST(0) by a 64-bit float. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
16392
16393
/** Opcode 0xdc !11/7. FDIVR m64real - reversed divide: m64 / ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
16400
16401
/** Opcode 0xdc. Escape opcode 4: dispatches on the ModR/M mod and reg fields.
 *  Register forms operate on ST(i),ST(0); memory forms on ST(0) and m64real. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (for FSTENV/FSAVE) before dispatching. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16438
16439
/** Opcode 0xdd !11/0. FLD m64real - push a 64-bit float onto the FPU stack,
 *  converting it to 80-bit extended precision.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Push only if the slot that becomes the new top (reg 7 rel. to top) is free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16471
16472
/** Opcode 0xdd !11/1 (see iemOp_EscF5 dispatch). FISTTP m64int - store ST(0)
 *  to memory as a 64-bit integer using truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if invalid-op is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16507
16508
/** Opcode 0xdd !11/2 (see iemOp_EscF5 dispatch). FST m64real - store ST(0)
 *  to memory as a 64-bit float without popping. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if invalid-op is masked, store negative QNaN instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16543
16544
16545
16546
/** Opcode 0xdd !11/3 (see iemOp_EscF5 dispatch). FSTP m64real - store ST(0)
 *  to memory as a 64-bit float, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if invalid-op is masked, store negative QNaN instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16581
16582
/** Opcode 0xdd !11/4 (see iemOp_EscF5 dispatch). FRSTOR - restore the FPU
 *  state from a 94/108-byte memory image; deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16600
16601
/** Opcode 0xdd !11/6 (see iemOp_EscF5 dispatch). FNSAVE - save the FPU state
 *  to a 94/108-byte memory image; deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
16620
/** Opcode 0xdd !11/7 (see iemOp_EscF5 dispatch). FNSTSW m16 - store the FPU
 *  status word to memory. No-wait form: note the absence of
 *  IEM_MC_MAYBE_RAISE_FPU_XCPT (pending FPU exceptions are not checked). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
16645
16646
/** Opcode 0xdd 11/0. FFREE ST(i) - mark the register's tag as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16668
16669
/** Opcode 0xdd 11/2 (see iemOp_EscF5 dispatch). FST ST(i) - copy ST(0) into ST(i). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value)
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE() /* ST(0) empty: record stack underflow against ST(i). */
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16694
16695
/** Opcode 0xdd 11/4 (see iemOp_EscF5 dispatch). FUCOM ST(i) - unordered compare ST(0) with ST(i). */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
16702
16703
/** Opcode 0xdd 11/5 (see iemOp_EscF5 dispatch). FUCOMP ST(i) - unordered compare, then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
16710
16711
/** Opcode 0xdd. Escape opcode 5: dispatches on the ModR/M mod and reg fields.
 *  Register forms are FFREE/FST/FSTP/FUCOM[P]; memory forms use m64real or the
 *  FRSTOR/FNSAVE/FNSTSW state images. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (for FSTENV/FSAVE) before dispatching. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16748
16749
/** Opcode 0xde 11/0. FADDP ST(i),ST(0) - add, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
16756
16757
/** Opcode 0xde 11/1 (see iemOp_EscF6 dispatch). FMULP ST(i),ST(0) - multiply, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
16764
16765
/** Opcode 0xde 0xd9. FCOMPP - compare ST(0) with ST(1), then pop twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
16772
16773
/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0) - reversed subtract, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
16780
16781
/** Opcode 0xde 11/5. FSUBP ST(i),ST(0) - subtract, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
16788
16789
/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0) - reversed divide, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
16796
16797
/** Opcode 0xde 11/7. FDIVP ST(i),ST(0) - divide, store in ST(i), pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
16804
16805
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form; used to compute the
 *                      effective address of the m16int operand).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only operate if ST(0) is occupied; otherwise record stack underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16841
16842
/** Opcode 0xde !11/0. FIADD m16int - add a 16-bit integer to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
16849
16850
/** Opcode 0xde !11/1. FIMUL m16int - multiply ST(0) by a 16-bit integer. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
16857
16858
/** Opcode 0xde !11/2. FICOM m16int - compare ST(0) with a 16-bit integer,
 *  setting C0..C3 in the FPU status word. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE() /* ST(0) empty: record stack underflow (no register result). */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16891
16892
/** Opcode 0xde !11/3. FICOMP m16int - compare ST(0) with a 16-bit integer,
 *  set C0..C3, then pop the register stack. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE() /* ST(0) empty: record stack underflow, still popping. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
16925
16926
/** Opcode 0xde !11/4. FISUB m16int - subtract a 16-bit integer from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
16933
16934
/** Opcode 0xde !11/5. FISUBR m16int - reversed subtract: m16 - ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
16941
16942
/** Opcode 0xde !11/6. FIDIV m16int - divide ST(0) by a 16-bit integer. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
16949
16950
/** Opcode 0xde !11/7. FIDIVR m16int - reversed divide: m16 / ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
16957
16958
/** Opcode 0xde. Escape opcode 6: dispatches on the ModR/M mod and reg fields.
 *  Register forms are the pop variants (FADDP..FDIVP, FCOMPP only for 0xd9);
 *  memory forms operate on ST(0) and an m16int. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (for FSTENV/FSAVE) before dispatching. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16997
16998
/** Opcode 0xdf 11/0. FFREEP ST(i).
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP(); /* the extra "pop" compared to FFREE */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17020
17021
/** Opcode 0xdf 0xe0. FNSTSW AX - store the FPU status word in AX.
 *  No-wait form: pending FPU exceptions are not checked (no
 *  IEM_MC_MAYBE_RAISE_FPU_XCPT). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
17038
17039
/** Opcode 0xdf 11/5. FUCOMIP ST(0),ST(i) - unordered compare into EFLAGS, pop.
 * NOTE(review): shares iemAImpl_fcomi_r80_by_r80 with FCOMIP below; on real
 * hardware FUCOMI[P] differs from FCOMI[P] in QNaN handling (no \#IA for
 * QNaN operands) -- confirm the shared worker implements the unordered
 * semantics for this path. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
17046
17047
/** Opcode 0xdf 11/6. FCOMIP ST(0),ST(i) - compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
17054
17055
/** Opcode 0xdf !11/0. FILD m16int - push a 16-bit integer onto the FPU stack,
 *  converting it to 80-bit extended precision. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push only if the slot that becomes the new top (reg 7 rel. to top) is free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17087
17088
/** Opcode 0xdf !11/1. FISTTP m16int - store ST(0) to memory as a 16-bit
 *  integer using truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: if invalid-op is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17123
17124
/** Opcode 0xdf !11/2. FIST m16int: store ST0 to memory as a 16-bit integer
 *  using the current FCW rounding mode; does NOT pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a page fault is raised before
       the FPU state is modified. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* No THEN_POP here - FIST leaves the stack alone (contrast FISTP). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: masked #IA stores the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17159
17160
/** Opcode 0xdf !11/3. FISTP m16int: store ST0 to memory as a 16-bit integer
 *  using the current FCW rounding mode, then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a page fault is raised before
       the FPU state is modified. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST0 empty: masked #IA stores the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
17195
17196
/** Opcode 0xdf !11/4. FBLD m80bcd - declared via the stub macro; the actual
 *  implementation is not present here (presumably reports not-implemented -
 *  TODO confirm FNIEMOP_STUB_1 semantics). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
17199
17200
17201/** Opcode 0xdf !11/5. */
17202FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
17203{
17204 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
17205
17206 IEM_MC_BEGIN(2, 3);
17207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17208 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
17209 IEM_MC_LOCAL(int64_t, i64Val);
17210 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
17211 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
17212
17213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17215
17216 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17217 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17218 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17219
17220 IEM_MC_PREPARE_FPU_USAGE();
17221 IEM_MC_IF_FPUREG_IS_EMPTY(7)
17222 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
17223 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17224 IEM_MC_ELSE()
17225 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17226 IEM_MC_ENDIF();
17227 IEM_MC_ADVANCE_RIP();
17228
17229 IEM_MC_END();
17230 return VINF_SUCCESS;
17231}
17232
17233
/** Opcode 0xdf !11/6. FBSTP m80bcd - declared via the stub macro; the actual
 *  implementation is not present here (presumably reports not-implemented -
 *  TODO confirm FNIEMOP_STUB_1 semantics). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
17236
17237
17238/** Opcode 0xdf !11/7. */
17239FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
17240{
17241 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
17242 IEM_MC_BEGIN(3, 2);
17243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17244 IEM_MC_LOCAL(uint16_t, u16Fsw);
17245 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
17246 IEM_MC_ARG(int64_t *, pi64Dst, 1);
17247 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
17248
17249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17251 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
17252 IEM_MC_MAYBE_RAISE_FPU_XCPT();
17253
17254 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
17255 IEM_MC_PREPARE_FPU_USAGE();
17256 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
17257 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
17258 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
17259 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17260 IEM_MC_ELSE()
17261 IEM_MC_IF_FCW_IM()
17262 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
17263 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
17264 IEM_MC_ENDIF();
17265 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17266 IEM_MC_ENDIF();
17267 IEM_MC_ADVANCE_RIP();
17268
17269 IEM_MC_END();
17270 return VINF_SUCCESS;
17271}
17272
17273
17274/** Opcode 0xdf. */
17275FNIEMOP_DEF(iemOp_EscF7)
17276{
17277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17278 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17279 {
17280 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17281 {
17282 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
17283 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
17284 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17285 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
17286 case 4: if (bRm == 0xe0)
17287 return FNIEMOP_CALL(iemOp_fnstsw_ax);
17288 return IEMOP_RAISE_INVALID_OPCODE();
17289 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
17290 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
17291 case 7: return IEMOP_RAISE_INVALID_OPCODE();
17292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17293 }
17294 }
17295 else
17296 {
17297 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17298 {
17299 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
17300 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
17301 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
17302 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
17303 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
17304 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
17305 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
17306 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
17307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17308 }
17309 }
17310}
17311
17312
17313/** Opcode 0xe0. */
17314FNIEMOP_DEF(iemOp_loopne_Jb)
17315{
17316 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
17317 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17319 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17320
17321 switch (pVCpu->iem.s.enmEffAddrMode)
17322 {
17323 case IEMMODE_16BIT:
17324 IEM_MC_BEGIN(0,0);
17325 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17326 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17327 IEM_MC_REL_JMP_S8(i8Imm);
17328 } IEM_MC_ELSE() {
17329 IEM_MC_ADVANCE_RIP();
17330 } IEM_MC_ENDIF();
17331 IEM_MC_END();
17332 return VINF_SUCCESS;
17333
17334 case IEMMODE_32BIT:
17335 IEM_MC_BEGIN(0,0);
17336 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17337 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17338 IEM_MC_REL_JMP_S8(i8Imm);
17339 } IEM_MC_ELSE() {
17340 IEM_MC_ADVANCE_RIP();
17341 } IEM_MC_ENDIF();
17342 IEM_MC_END();
17343 return VINF_SUCCESS;
17344
17345 case IEMMODE_64BIT:
17346 IEM_MC_BEGIN(0,0);
17347 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17348 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
17349 IEM_MC_REL_JMP_S8(i8Imm);
17350 } IEM_MC_ELSE() {
17351 IEM_MC_ADVANCE_RIP();
17352 } IEM_MC_ENDIF();
17353 IEM_MC_END();
17354 return VINF_SUCCESS;
17355
17356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17357 }
17358}
17359
17360
17361/** Opcode 0xe1. */
17362FNIEMOP_DEF(iemOp_loope_Jb)
17363{
17364 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
17365 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17367 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17368
17369 switch (pVCpu->iem.s.enmEffAddrMode)
17370 {
17371 case IEMMODE_16BIT:
17372 IEM_MC_BEGIN(0,0);
17373 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17374 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17375 IEM_MC_REL_JMP_S8(i8Imm);
17376 } IEM_MC_ELSE() {
17377 IEM_MC_ADVANCE_RIP();
17378 } IEM_MC_ENDIF();
17379 IEM_MC_END();
17380 return VINF_SUCCESS;
17381
17382 case IEMMODE_32BIT:
17383 IEM_MC_BEGIN(0,0);
17384 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17385 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17386 IEM_MC_REL_JMP_S8(i8Imm);
17387 } IEM_MC_ELSE() {
17388 IEM_MC_ADVANCE_RIP();
17389 } IEM_MC_ENDIF();
17390 IEM_MC_END();
17391 return VINF_SUCCESS;
17392
17393 case IEMMODE_64BIT:
17394 IEM_MC_BEGIN(0,0);
17395 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17396 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
17397 IEM_MC_REL_JMP_S8(i8Imm);
17398 } IEM_MC_ELSE() {
17399 IEM_MC_ADVANCE_RIP();
17400 } IEM_MC_ENDIF();
17401 IEM_MC_END();
17402 return VINF_SUCCESS;
17403
17404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17405 }
17406}
17407
17408
17409/** Opcode 0xe2. */
17410FNIEMOP_DEF(iemOp_loop_Jb)
17411{
17412 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
17413 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17415 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17416
17417 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
17418 * using the 32-bit operand size override. How can that be restarted? See
17419 * weird pseudo code in intel manual. */
17420 switch (pVCpu->iem.s.enmEffAddrMode)
17421 {
17422 case IEMMODE_16BIT:
17423 IEM_MC_BEGIN(0,0);
17424 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17425 {
17426 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
17427 IEM_MC_IF_CX_IS_NZ() {
17428 IEM_MC_REL_JMP_S8(i8Imm);
17429 } IEM_MC_ELSE() {
17430 IEM_MC_ADVANCE_RIP();
17431 } IEM_MC_ENDIF();
17432 }
17433 else
17434 {
17435 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
17436 IEM_MC_ADVANCE_RIP();
17437 }
17438 IEM_MC_END();
17439 return VINF_SUCCESS;
17440
17441 case IEMMODE_32BIT:
17442 IEM_MC_BEGIN(0,0);
17443 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17444 {
17445 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
17446 IEM_MC_IF_ECX_IS_NZ() {
17447 IEM_MC_REL_JMP_S8(i8Imm);
17448 } IEM_MC_ELSE() {
17449 IEM_MC_ADVANCE_RIP();
17450 } IEM_MC_ENDIF();
17451 }
17452 else
17453 {
17454 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
17455 IEM_MC_ADVANCE_RIP();
17456 }
17457 IEM_MC_END();
17458 return VINF_SUCCESS;
17459
17460 case IEMMODE_64BIT:
17461 IEM_MC_BEGIN(0,0);
17462 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
17463 {
17464 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
17465 IEM_MC_IF_RCX_IS_NZ() {
17466 IEM_MC_REL_JMP_S8(i8Imm);
17467 } IEM_MC_ELSE() {
17468 IEM_MC_ADVANCE_RIP();
17469 } IEM_MC_ENDIF();
17470 }
17471 else
17472 {
17473 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
17474 IEM_MC_ADVANCE_RIP();
17475 }
17476 IEM_MC_END();
17477 return VINF_SUCCESS;
17478
17479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17480 }
17481}
17482
17483
17484/** Opcode 0xe3. */
17485FNIEMOP_DEF(iemOp_jecxz_Jb)
17486{
17487 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
17488 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17490 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17491
17492 switch (pVCpu->iem.s.enmEffAddrMode)
17493 {
17494 case IEMMODE_16BIT:
17495 IEM_MC_BEGIN(0,0);
17496 IEM_MC_IF_CX_IS_NZ() {
17497 IEM_MC_ADVANCE_RIP();
17498 } IEM_MC_ELSE() {
17499 IEM_MC_REL_JMP_S8(i8Imm);
17500 } IEM_MC_ENDIF();
17501 IEM_MC_END();
17502 return VINF_SUCCESS;
17503
17504 case IEMMODE_32BIT:
17505 IEM_MC_BEGIN(0,0);
17506 IEM_MC_IF_ECX_IS_NZ() {
17507 IEM_MC_ADVANCE_RIP();
17508 } IEM_MC_ELSE() {
17509 IEM_MC_REL_JMP_S8(i8Imm);
17510 } IEM_MC_ENDIF();
17511 IEM_MC_END();
17512 return VINF_SUCCESS;
17513
17514 case IEMMODE_64BIT:
17515 IEM_MC_BEGIN(0,0);
17516 IEM_MC_IF_RCX_IS_NZ() {
17517 IEM_MC_ADVANCE_RIP();
17518 } IEM_MC_ELSE() {
17519 IEM_MC_REL_JMP_S8(i8Imm);
17520 } IEM_MC_ENDIF();
17521 IEM_MC_END();
17522 return VINF_SUCCESS;
17523
17524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17525 }
17526}
17527
17528
17529/** Opcode 0xe4 */
17530FNIEMOP_DEF(iemOp_in_AL_Ib)
17531{
17532 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
17533 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17535 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
17536}
17537
17538
17539/** Opcode 0xe5 */
17540FNIEMOP_DEF(iemOp_in_eAX_Ib)
17541{
17542 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
17543 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17545 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17546}
17547
17548
17549/** Opcode 0xe6 */
17550FNIEMOP_DEF(iemOp_out_Ib_AL)
17551{
17552 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
17553 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17555 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
17556}
17557
17558
17559/** Opcode 0xe7 */
17560FNIEMOP_DEF(iemOp_out_Ib_eAX)
17561{
17562 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
17563 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17565 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17566}
17567
17568
17569/** Opcode 0xe8. */
17570FNIEMOP_DEF(iemOp_call_Jv)
17571{
17572 IEMOP_MNEMONIC(call_Jv, "call Jv");
17573 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17574 switch (pVCpu->iem.s.enmEffOpSize)
17575 {
17576 case IEMMODE_16BIT:
17577 {
17578 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17579 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
17580 }
17581
17582 case IEMMODE_32BIT:
17583 {
17584 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17585 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
17586 }
17587
17588 case IEMMODE_64BIT:
17589 {
17590 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17591 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
17592 }
17593
17594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17595 }
17596}
17597
17598
17599/** Opcode 0xe9. */
17600FNIEMOP_DEF(iemOp_jmp_Jv)
17601{
17602 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
17603 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17604 switch (pVCpu->iem.s.enmEffOpSize)
17605 {
17606 case IEMMODE_16BIT:
17607 {
17608 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
17609 IEM_MC_BEGIN(0, 0);
17610 IEM_MC_REL_JMP_S16(i16Imm);
17611 IEM_MC_END();
17612 return VINF_SUCCESS;
17613 }
17614
17615 case IEMMODE_64BIT:
17616 case IEMMODE_32BIT:
17617 {
17618 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
17619 IEM_MC_BEGIN(0, 0);
17620 IEM_MC_REL_JMP_S32(i32Imm);
17621 IEM_MC_END();
17622 return VINF_SUCCESS;
17623 }
17624
17625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17626 }
17627}
17628
17629
17630/** Opcode 0xea. */
17631FNIEMOP_DEF(iemOp_jmp_Ap)
17632{
17633 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
17634 IEMOP_HLP_NO_64BIT();
17635
17636 /* Decode the far pointer address and pass it on to the far call C implementation. */
17637 uint32_t offSeg;
17638 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
17639 IEM_OPCODE_GET_NEXT_U32(&offSeg);
17640 else
17641 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
17642 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
17643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17644 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
17645}
17646
17647
17648/** Opcode 0xeb. */
17649FNIEMOP_DEF(iemOp_jmp_Jb)
17650{
17651 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
17652 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
17653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17654 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17655
17656 IEM_MC_BEGIN(0, 0);
17657 IEM_MC_REL_JMP_S8(i8Imm);
17658 IEM_MC_END();
17659 return VINF_SUCCESS;
17660}
17661
17662
17663/** Opcode 0xec */
17664FNIEMOP_DEF(iemOp_in_AL_DX)
17665{
17666 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
17667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17668 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
17669}
17670
17671
17672/** Opcode 0xed */
17673FNIEMOP_DEF(iemOp_eAX_DX)
17674{
17675 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
17676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17677 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17678}
17679
17680
17681/** Opcode 0xee */
17682FNIEMOP_DEF(iemOp_out_DX_AL)
17683{
17684 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
17685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17686 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
17687}
17688
17689
17690/** Opcode 0xef */
17691FNIEMOP_DEF(iemOp_out_DX_eAX)
17692{
17693 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
17694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17695 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17696}
17697
17698
17699/** Opcode 0xf0. */
17700FNIEMOP_DEF(iemOp_lock)
17701{
17702 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17703 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17704
17705 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17706 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17707}
17708
17709
17710/** Opcode 0xf1. */
17711FNIEMOP_DEF(iemOp_int_1)
17712{
17713 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17714 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17715 /** @todo testcase! */
17716 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17717}
17718
17719
17720/** Opcode 0xf2. */
17721FNIEMOP_DEF(iemOp_repne)
17722{
17723 /* This overrides any previous REPE prefix. */
17724 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17725 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17726 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17727
17728 /* For the 4 entry opcode tables, REPNZ overrides any previous
17729 REPZ and operand size prefixes. */
17730 pVCpu->iem.s.idxPrefix = 3;
17731
17732 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17733 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17734}
17735
17736
17737/** Opcode 0xf3. */
17738FNIEMOP_DEF(iemOp_repe)
17739{
17740 /* This overrides any previous REPNE prefix. */
17741 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17742 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17743 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17744
17745 /* For the 4 entry opcode tables, REPNZ overrides any previous
17746 REPNZ and operand size prefixes. */
17747 pVCpu->iem.s.idxPrefix = 2;
17748
17749 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17750 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17751}
17752
17753
17754/** Opcode 0xf4. */
17755FNIEMOP_DEF(iemOp_hlt)
17756{
17757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17758 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17759}
17760
17761
17762/** Opcode 0xf5. */
17763FNIEMOP_DEF(iemOp_cmc)
17764{
17765 IEMOP_MNEMONIC(cmc, "cmc");
17766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17767 IEM_MC_BEGIN(0, 0);
17768 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
17769 IEM_MC_ADVANCE_RIP();
17770 IEM_MC_END();
17771 return VINF_SUCCESS;
17772}
17773
17774
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Handles both the register form (direct register reference) and the memory
 * form (read-modify-write mapping). For memory operands a LOCK prefix selects
 * the locked assembly worker instead of the plain one.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (table of U8..U64 and
 *                  locked workers; only the U8 entries are used here).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* Read-write mapping: the operand is modified in place. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17818
17819
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are forwarded to the shared greg worker; memory operands
 * are handled here per effective operand size with a read-modify-write
 * mapping. A LOCK prefix selects the locked assembly worker.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (table of U16/U32/U64 and
 *                  locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17898
17899
/** Opcode 0xf6 /0. TEST Eb,Ib: AND without writeback, flags only. The memory
 *  operand is mapped read-only since TEST never stores a result. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* 1 = one immediate byte follows the ModR/M bytes (affects disp decoding). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
17946
17947
/** Opcode 0xf7 /0. TEST Ev,Iv: AND without writeback, flags only, per
 *  effective operand size. The 64-bit form uses a sign-extended 32-bit
 *  immediate; the memory operand is mapped read-only since TEST never
 *  stores a result. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* Iv is imm32 sign-extended to 64 bits in 64-bit operand size. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 = two immediate bytes follow (affects disp decoding). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 = four immediate bytes follow (affects disp decoding). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still 4 immediate bytes in 64-bit mode (imm32, sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18087
18088
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL, IMUL, DIV and IDIV encodings.  The
 * explicit operand is Eb (register or memory); AX is the implicit
 * destination / dividend, so the assembly worker gets a pointer to AX.  A
 * non-zero status returned by the worker signals a divide error (\#DE).
 *
 * @param   bRm     The ModR/M byte (already fetched).
 * @param   pfnU8   The byte-sized assembly worker (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,  pu16AX,  0);
        IEM_MC_ARG(uint8_t,     u8Value, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags, 2);
        IEM_MC_LOCAL(int32_t,   rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc != 0 means divide-by-zero or quotient overflow -> raise #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,  pu16AX,  0);
        IEM_MC_ARG(uint8_t,     u8Value, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,   rc);

        /* Effective address must be calculated before decode-done (it may
           consume displacement bytes). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
18140
18141
18142/** Opcode 0xf7 /4, /5, /6 and /7. */
18143FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
18144{
18145 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
18146
18147 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18148 {
18149 /* register access */
18150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18151 switch (pVCpu->iem.s.enmEffOpSize)
18152 {
18153 case IEMMODE_16BIT:
18154 {
18155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18156 IEM_MC_BEGIN(4, 1);
18157 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18158 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18159 IEM_MC_ARG(uint16_t, u16Value, 2);
18160 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18161 IEM_MC_LOCAL(int32_t, rc);
18162
18163 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18164 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18165 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18166 IEM_MC_REF_EFLAGS(pEFlags);
18167 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18168 IEM_MC_IF_LOCAL_IS_Z(rc) {
18169 IEM_MC_ADVANCE_RIP();
18170 } IEM_MC_ELSE() {
18171 IEM_MC_RAISE_DIVIDE_ERROR();
18172 } IEM_MC_ENDIF();
18173
18174 IEM_MC_END();
18175 return VINF_SUCCESS;
18176 }
18177
18178 case IEMMODE_32BIT:
18179 {
18180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18181 IEM_MC_BEGIN(4, 1);
18182 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18183 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18184 IEM_MC_ARG(uint32_t, u32Value, 2);
18185 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18186 IEM_MC_LOCAL(int32_t, rc);
18187
18188 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18189 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18190 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18191 IEM_MC_REF_EFLAGS(pEFlags);
18192 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18193 IEM_MC_IF_LOCAL_IS_Z(rc) {
18194 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18195 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18196 IEM_MC_ADVANCE_RIP();
18197 } IEM_MC_ELSE() {
18198 IEM_MC_RAISE_DIVIDE_ERROR();
18199 } IEM_MC_ENDIF();
18200
18201 IEM_MC_END();
18202 return VINF_SUCCESS;
18203 }
18204
18205 case IEMMODE_64BIT:
18206 {
18207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18208 IEM_MC_BEGIN(4, 1);
18209 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18210 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18211 IEM_MC_ARG(uint64_t, u64Value, 2);
18212 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18213 IEM_MC_LOCAL(int32_t, rc);
18214
18215 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18216 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18217 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18218 IEM_MC_REF_EFLAGS(pEFlags);
18219 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18220 IEM_MC_IF_LOCAL_IS_Z(rc) {
18221 IEM_MC_ADVANCE_RIP();
18222 } IEM_MC_ELSE() {
18223 IEM_MC_RAISE_DIVIDE_ERROR();
18224 } IEM_MC_ENDIF();
18225
18226 IEM_MC_END();
18227 return VINF_SUCCESS;
18228 }
18229
18230 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18231 }
18232 }
18233 else
18234 {
18235 /* memory access. */
18236 switch (pVCpu->iem.s.enmEffOpSize)
18237 {
18238 case IEMMODE_16BIT:
18239 {
18240 IEM_MC_BEGIN(4, 2);
18241 IEM_MC_ARG(uint16_t *, pu16AX, 0);
18242 IEM_MC_ARG(uint16_t *, pu16DX, 1);
18243 IEM_MC_ARG(uint16_t, u16Value, 2);
18244 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18246 IEM_MC_LOCAL(int32_t, rc);
18247
18248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18250 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18251 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
18252 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
18253 IEM_MC_REF_EFLAGS(pEFlags);
18254 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
18255 IEM_MC_IF_LOCAL_IS_Z(rc) {
18256 IEM_MC_ADVANCE_RIP();
18257 } IEM_MC_ELSE() {
18258 IEM_MC_RAISE_DIVIDE_ERROR();
18259 } IEM_MC_ENDIF();
18260
18261 IEM_MC_END();
18262 return VINF_SUCCESS;
18263 }
18264
18265 case IEMMODE_32BIT:
18266 {
18267 IEM_MC_BEGIN(4, 2);
18268 IEM_MC_ARG(uint32_t *, pu32AX, 0);
18269 IEM_MC_ARG(uint32_t *, pu32DX, 1);
18270 IEM_MC_ARG(uint32_t, u32Value, 2);
18271 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18273 IEM_MC_LOCAL(int32_t, rc);
18274
18275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18277 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18278 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
18279 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
18280 IEM_MC_REF_EFLAGS(pEFlags);
18281 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
18282 IEM_MC_IF_LOCAL_IS_Z(rc) {
18283 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
18284 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
18285 IEM_MC_ADVANCE_RIP();
18286 } IEM_MC_ELSE() {
18287 IEM_MC_RAISE_DIVIDE_ERROR();
18288 } IEM_MC_ENDIF();
18289
18290 IEM_MC_END();
18291 return VINF_SUCCESS;
18292 }
18293
18294 case IEMMODE_64BIT:
18295 {
18296 IEM_MC_BEGIN(4, 2);
18297 IEM_MC_ARG(uint64_t *, pu64AX, 0);
18298 IEM_MC_ARG(uint64_t *, pu64DX, 1);
18299 IEM_MC_ARG(uint64_t, u64Value, 2);
18300 IEM_MC_ARG(uint32_t *, pEFlags, 3);
18301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
18302 IEM_MC_LOCAL(int32_t, rc);
18303
18304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
18305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18306 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
18307 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
18308 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
18309 IEM_MC_REF_EFLAGS(pEFlags);
18310 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
18311 IEM_MC_IF_LOCAL_IS_Z(rc) {
18312 IEM_MC_ADVANCE_RIP();
18313 } IEM_MC_ELSE() {
18314 IEM_MC_RAISE_DIVIDE_ERROR();
18315 } IEM_MC_ENDIF();
18316
18317 IEM_MC_END();
18318 return VINF_SUCCESS;
18319 }
18320
18321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18322 }
18323 }
18324}
18325
/** Opcode 0xf6.
 * Group 3 with a byte-sized operand; dispatches on the ModR/M reg field:
 * /0 test, /1 \#UD (undocumented alias, see todo), /2 not, /3 neg,
 * /4 mul, /5 imul, /6 div, /7 idiv. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* DIV/IDIV additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18362
18363
/** Opcode 0xf7.
 * Group 3 with a word/dword/qword operand; dispatches on the ModR/M reg
 * field: /0 test, /1 \#UD (undocumented alias, see todo), /2 not, /3 neg,
 * /4 mul, /5 imul, /6 div, /7 idiv. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            /* DIV/IDIV additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18400
18401
/** Opcode 0xf8 - clc (clear the carry flag). */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18413
18414
/** Opcode 0xf9 - stc (set the carry flag). */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18426
18427
/** Opcode 0xfa - cli.  Deferred to a C implementation since clearing IF
 * involves IOPL/VME privilege checks. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
18435
18436
/** Opcode 0xfb - sti.  Deferred to a C implementation since setting IF
 * involves IOPL/VME privilege checks and the one-instruction
 * interrupt-inhibit window. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
18443
18444
/** Opcode 0xfc - cld (clear the direction flag). */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18456
18457
/** Opcode 0xfd - std (set the direction flag). */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
18469
18470
/** Opcode 0xfe.
 * Group 4, byte operand: /0 inc Eb, /1 dec Eb, /2../7 \#UD. */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
18488
18489
/**
 * Opcode 0xff /2 - near indirect call.
 *
 * The new IP/EIP/RIP comes from a register or memory operand; the call
 * itself (stack push, RIP update, canonicality checks) is done by the
 * size-specific iemCImpl_call_NN workers.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    /* In 64-bit mode the operand size defaults to 64-bit (no REX.W needed). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18574
18575typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
18576
18577FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
18578{
18579 /* Registers? How?? */
18580 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
18581 { /* likely */ }
18582 else
18583 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
18584
18585 /* Far pointer loaded from memory. */
18586 switch (pVCpu->iem.s.enmEffOpSize)
18587 {
18588 case IEMMODE_16BIT:
18589 IEM_MC_BEGIN(3, 1);
18590 IEM_MC_ARG(uint16_t, u16Sel, 0);
18591 IEM_MC_ARG(uint16_t, offSeg, 1);
18592 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18596 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18597 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
18598 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18599 IEM_MC_END();
18600 return VINF_SUCCESS;
18601
18602 case IEMMODE_64BIT:
18603 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
18604 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
18605 * and call far qword [rsp] encodings. */
18606 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
18607 {
18608 IEM_MC_BEGIN(3, 1);
18609 IEM_MC_ARG(uint16_t, u16Sel, 0);
18610 IEM_MC_ARG(uint64_t, offSeg, 1);
18611 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
18612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18615 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18616 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
18617 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18618 IEM_MC_END();
18619 return VINF_SUCCESS;
18620 }
18621 /* AMD falls thru. */
18622
18623 case IEMMODE_32BIT:
18624 IEM_MC_BEGIN(3, 1);
18625 IEM_MC_ARG(uint16_t, u16Sel, 0);
18626 IEM_MC_ARG(uint32_t, offSeg, 1);
18627 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
18628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18631 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18632 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
18633 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
18634 IEM_MC_END();
18635 return VINF_SUCCESS;
18636
18637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18638 }
18639}
18640
18641
/**
 * Opcode 0xff /3 - far indirect call via a far pointer in memory.
 *
 * Shares the operand decoding with far jmp (/5) via iemOpHlp_Grp5_far_Ep;
 * only the C implementation differs.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
18651
18652
/**
 * Opcode 0xff /4 - near indirect jump.
 *
 * The new IP/EIP/RIP comes from a register or memory operand and is
 * installed directly via IEM_MC_SET_RIP_*.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* In 64-bit mode the operand size defaults to 64-bit (no REX.W needed). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
18737
18738
/**
 * Opcode 0xff /5 - far indirect jump via a far pointer in memory.
 *
 * Shares the operand decoding with far call (/3) via iemOpHlp_Grp5_far_Ep;
 * only the C implementation differs.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
18748
18749
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Register operands are delegated to the common push-GReg worker; memory
 * operands are fetched and pushed here.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    /* In 64-bit mode pushes default to 64-bit operand size. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
18805
18806
/** Opcode 0xff.
 * Group 5 dispatcher: /0 inc Ev, /1 dec Ev, /2 calln Ev, /3 callf Ep,
 * /4 jmpn Ev, /5 jmpf Ep, /6 push Ev, /7 \#UD. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3);
}
18835
18836
18837
/**
 * The one-byte opcode dispatch table, indexed by the first opcode byte.
 * Prefix bytes (segment overrides, operand/address size, lock, rep) and
 * group/escape opcodes point at dedicated decoder functions.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp_vex2,   iemOp_lds_Gv_Mp_vex3,   iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int_1,            iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
18905
18906
18907/** @} */
18908
18909#ifdef _MSC_VER
18910# pragma warning(pop)
18911#endif
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette