VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@65515

Last change on this file since 65515 was 65509, checked in by vboxsync, 8 years ago

IEM: Enabled cmpxchg16b code.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 632.5 KB
1/* $Id: IEMAllInstructions.cpp.h 65509 2017-01-29 17:34:57Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24#ifdef _MSC_VER
25# pragma warning(push)
26# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
27#endif
28
29
30/**
31 * Common worker for instructions like ADD, AND, OR, ++ with a byte
32 * memory/register as the destination.
33 *
34 * @param pImpl Pointer to the instruction implementation (assembly).
35 */
36FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
37{
38 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
39
40 /*
41 * If rm is denoting a register, no more instruction bytes.
42 */
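 /* ModRM recap: bits 7:6 = mod, bits 5:3 = reg, bits 2:0 = r/m; mod == 3
    selects a register operand, so no SIB or displacement bytes follow. */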
43 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
44 {
45 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
46
47 IEM_MC_BEGIN(3, 0);
48 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
49 IEM_MC_ARG(uint8_t, u8Src, 1);
50 IEM_MC_ARG(uint32_t *, pEFlags, 2);
51
52 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
53 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
54 IEM_MC_REF_EFLAGS(pEFlags);
55 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
56
57 IEM_MC_ADVANCE_RIP();
58 IEM_MC_END();
59 }
60 else
61 {
62 /*
63 * We're accessing memory.
64 * Note! We're putting the eflags on the stack here so we can commit them
65 * after the memory.
66 */
67 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
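 /* Instructions without a locked variant (CMP, TEST) never write their
    destination, so the memory operand can be mapped read-only. */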
68 IEM_MC_BEGIN(3, 2);
69 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
70 IEM_MC_ARG(uint8_t, u8Src, 1);
71 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
72 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
73
74 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
75 if (!pImpl->pfnLockedU8)
76 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
77 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
78 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
79 IEM_MC_FETCH_EFLAGS(EFlags);
80 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
81 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
82 else
83 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
84
85 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
86 IEM_MC_COMMIT_EFLAGS(EFlags);
87 IEM_MC_ADVANCE_RIP();
88 IEM_MC_END();
89 }
90 return VINF_SUCCESS;
91}
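
/* Illustrative sketch (the actual callers appear later in this file): a
   one-byte opcode handler such as ADD Eb,Gb would simply forward to this
   worker with its implementation table, along these lines:
       FNIEMOP_DEF(iemOp_add_Eb_Gb)
       {
           IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
           return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
       }
   The mnemonic arguments and table name are assumptions for illustration. */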
92
93
94/**
95 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
96 * memory/register as the destination.
97 *
98 * @param pImpl Pointer to the instruction implementation (assembly).
99 */
100FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
101{
102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
103
104 /*
105 * If rm is denoting a register, no more instruction bytes.
106 */
107 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
108 {
109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
110
111 switch (pVCpu->iem.s.enmEffOpSize)
112 {
113 case IEMMODE_16BIT:
114 IEM_MC_BEGIN(3, 0);
115 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
116 IEM_MC_ARG(uint16_t, u16Src, 1);
117 IEM_MC_ARG(uint32_t *, pEFlags, 2);
118
119 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
120 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
121 IEM_MC_REF_EFLAGS(pEFlags);
122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
123
124 IEM_MC_ADVANCE_RIP();
125 IEM_MC_END();
126 break;
127
128 case IEMMODE_32BIT:
129 IEM_MC_BEGIN(3, 0);
130 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
131 IEM_MC_ARG(uint32_t, u32Src, 1);
132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
133
134 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
135 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
136 IEM_MC_REF_EFLAGS(pEFlags);
137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
138
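 /* A 32-bit GPR write zero-extends into bits 63:32 in 64-bit mode;
    TEST is exempt below because it never writes its destination. */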
139 if (pImpl != &g_iemAImpl_test)
140 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
141 IEM_MC_ADVANCE_RIP();
142 IEM_MC_END();
143 break;
144
145 case IEMMODE_64BIT:
146 IEM_MC_BEGIN(3, 0);
147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
148 IEM_MC_ARG(uint64_t, u64Src, 1);
149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
150
151 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
152 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
153 IEM_MC_REF_EFLAGS(pEFlags);
154 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
155
156 IEM_MC_ADVANCE_RIP();
157 IEM_MC_END();
158 break;
159 }
160 }
161 else
162 {
163 /*
164 * We're accessing memory.
165 * Note! We're putting the eflags on the stack here so we can commit them
166 * after the memory.
167 */
168 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
169 switch (pVCpu->iem.s.enmEffOpSize)
170 {
171 case IEMMODE_16BIT:
172 IEM_MC_BEGIN(3, 2);
173 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
174 IEM_MC_ARG(uint16_t, u16Src, 1);
175 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
177
178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
179 if (!pImpl->pfnLockedU16)
180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
181 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
182 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
183 IEM_MC_FETCH_EFLAGS(EFlags);
184 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
186 else
187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
188
189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
190 IEM_MC_COMMIT_EFLAGS(EFlags);
191 IEM_MC_ADVANCE_RIP();
192 IEM_MC_END();
193 break;
194
195 case IEMMODE_32BIT:
196 IEM_MC_BEGIN(3, 2);
197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
198 IEM_MC_ARG(uint32_t, u32Src, 1);
199 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
201
202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
203 if (!pImpl->pfnLockedU32)
204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
205 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
206 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
207 IEM_MC_FETCH_EFLAGS(EFlags);
208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
210 else
211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
212
213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
214 IEM_MC_COMMIT_EFLAGS(EFlags);
215 IEM_MC_ADVANCE_RIP();
216 IEM_MC_END();
217 break;
218
219 case IEMMODE_64BIT:
220 IEM_MC_BEGIN(3, 2);
221 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
222 IEM_MC_ARG(uint64_t, u64Src, 1);
223 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
225
226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
227 if (!pImpl->pfnLockedU64)
228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
229 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
230 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
231 IEM_MC_FETCH_EFLAGS(EFlags);
232 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
233 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
234 else
235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
236
237 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
238 IEM_MC_COMMIT_EFLAGS(EFlags);
239 IEM_MC_ADVANCE_RIP();
240 IEM_MC_END();
241 break;
242 }
243 }
244 return VINF_SUCCESS;
245}
246
247
248/**
249 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
250 * the destination.
251 *
252 * @param pImpl Pointer to the instruction implementation (assembly).
253 */
254FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
255{
256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
257
258 /*
259 * If rm is denoting a register, no more instruction bytes.
260 */
261 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
262 {
263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
264 IEM_MC_BEGIN(3, 0);
265 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
266 IEM_MC_ARG(uint8_t, u8Src, 1);
267 IEM_MC_ARG(uint32_t *, pEFlags, 2);
268
269 IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
270 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
271 IEM_MC_REF_EFLAGS(pEFlags);
272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
273
274 IEM_MC_ADVANCE_RIP();
275 IEM_MC_END();
276 }
277 else
278 {
279 /*
280 * We're accessing memory.
281 */
282 IEM_MC_BEGIN(3, 1);
283 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
284 IEM_MC_ARG(uint8_t, u8Src, 1);
285 IEM_MC_ARG(uint32_t *, pEFlags, 2);
286 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
287
288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
290 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
291 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
292 IEM_MC_REF_EFLAGS(pEFlags);
293 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
294
295 IEM_MC_ADVANCE_RIP();
296 IEM_MC_END();
297 }
298 return VINF_SUCCESS;
299}
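
/* Note the direction relative to iemOpHlpBinaryOperator_rm_r8: here the ModRM
   reg field names the destination register and r/m the source, so a memory
   operand is only read and needs no mapping, lock handling or commit. */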
300
301
302/**
303 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
304 * register as the destination.
305 *
306 * @param pImpl Pointer to the instruction implementation (assembly).
307 */
308FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
309{
310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
311
312 /*
313 * If rm is denoting a register, no more instruction bytes.
314 */
315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
316 {
317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
318 switch (pVCpu->iem.s.enmEffOpSize)
319 {
320 case IEMMODE_16BIT:
321 IEM_MC_BEGIN(3, 0);
322 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
323 IEM_MC_ARG(uint16_t, u16Src, 1);
324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
325
326 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
327 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
328 IEM_MC_REF_EFLAGS(pEFlags);
329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
330
331 IEM_MC_ADVANCE_RIP();
332 IEM_MC_END();
333 break;
334
335 case IEMMODE_32BIT:
336 IEM_MC_BEGIN(3, 0);
337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
338 IEM_MC_ARG(uint32_t, u32Src, 1);
339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
340
341 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
342 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
343 IEM_MC_REF_EFLAGS(pEFlags);
344 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
345
346 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
347 IEM_MC_ADVANCE_RIP();
348 IEM_MC_END();
349 break;
350
351 case IEMMODE_64BIT:
352 IEM_MC_BEGIN(3, 0);
353 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
354 IEM_MC_ARG(uint64_t, u64Src, 1);
355 IEM_MC_ARG(uint32_t *, pEFlags, 2);
356
357 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
358 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
359 IEM_MC_REF_EFLAGS(pEFlags);
360 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
361
362 IEM_MC_ADVANCE_RIP();
363 IEM_MC_END();
364 break;
365 }
366 }
367 else
368 {
369 /*
370 * We're accessing memory.
371 */
372 switch (pVCpu->iem.s.enmEffOpSize)
373 {
374 case IEMMODE_16BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
377 IEM_MC_ARG(uint16_t, u16Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
383 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
384 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
385 IEM_MC_REF_EFLAGS(pEFlags);
386 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
387
388 IEM_MC_ADVANCE_RIP();
389 IEM_MC_END();
390 break;
391
392 case IEMMODE_32BIT:
393 IEM_MC_BEGIN(3, 1);
394 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
395 IEM_MC_ARG(uint32_t, u32Src, 1);
396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
398
399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
401 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
402 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
403 IEM_MC_REF_EFLAGS(pEFlags);
404 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
405
406 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
407 IEM_MC_ADVANCE_RIP();
408 IEM_MC_END();
409 break;
410
411 case IEMMODE_64BIT:
412 IEM_MC_BEGIN(3, 1);
413 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
414 IEM_MC_ARG(uint64_t, u64Src, 1);
415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
417
418 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
420 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
421 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
422 IEM_MC_REF_EFLAGS(pEFlags);
423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
424
425 IEM_MC_ADVANCE_RIP();
426 IEM_MC_END();
427 break;
428 }
429 }
430 return VINF_SUCCESS;
431}
432
433
434/**
435 * Common worker for instructions like ADD, AND, OR, ++ working on AL with
436 * a byte immediate.
437 *
438 * @param pImpl Pointer to the instruction implementation (assembly).
439 */
440FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
441{
442 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
444
445 IEM_MC_BEGIN(3, 0);
446 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
447 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
448 IEM_MC_ARG(uint32_t *, pEFlags, 2);
449
450 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
451 IEM_MC_REF_EFLAGS(pEFlags);
452 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
453
454 IEM_MC_ADVANCE_RIP();
455 IEM_MC_END();
456 return VINF_SUCCESS;
457}
458
459
460/**
461 * Common worker for instructions like ADD, AND, OR, ++ working on
462 * AX/EAX/RAX with a word/dword immediate.
463 *
464 * @param pImpl Pointer to the instruction implementation (assembly).
465 */
466FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
467{
468 switch (pVCpu->iem.s.enmEffOpSize)
469 {
470 case IEMMODE_16BIT:
471 {
472 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
474
475 IEM_MC_BEGIN(3, 0);
476 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
477 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
478 IEM_MC_ARG(uint32_t *, pEFlags, 2);
479
480 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
481 IEM_MC_REF_EFLAGS(pEFlags);
482 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
483
484 IEM_MC_ADVANCE_RIP();
485 IEM_MC_END();
486 return VINF_SUCCESS;
487 }
488
489 case IEMMODE_32BIT:
490 {
491 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
493
494 IEM_MC_BEGIN(3, 0);
495 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
496 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
497 IEM_MC_ARG(uint32_t *, pEFlags, 2);
498
499 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
500 IEM_MC_REF_EFLAGS(pEFlags);
501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
502
503 if (pImpl != &g_iemAImpl_test)
504 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
505 IEM_MC_ADVANCE_RIP();
506 IEM_MC_END();
507 return VINF_SUCCESS;
508 }
509
510 case IEMMODE_64BIT:
511 {
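 /* In 64-bit mode Iz remains a 32-bit immediate; it is fetched
    sign-extended to 64 bits, matching the instruction encoding. */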
512 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
514
515 IEM_MC_BEGIN(3, 0);
516 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
517 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
519
520 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
521 IEM_MC_REF_EFLAGS(pEFlags);
522 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
523
524 IEM_MC_ADVANCE_RIP();
525 IEM_MC_END();
526 return VINF_SUCCESS;
527 }
528
529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
530 }
531}
532
533
534/** Opcodes 0xf1, 0xd6. */
535FNIEMOP_DEF(iemOp_Invalid)
536{
537 IEMOP_MNEMONIC(Invalid, "Invalid");
538 return IEMOP_RAISE_INVALID_OPCODE();
539}
540
541
542/** Invalid with RM byte. */
543FNIEMOPRM_DEF(iemOp_InvalidWithRM)
544{
545 RT_NOREF_PV(bRm);
546 IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
547 return IEMOP_RAISE_INVALID_OPCODE();
548}
549
550
551
552/** @name ..... opcodes.
553 *
554 * @{
555 */
556
557/** @} */
558
559
560/** @name Two byte opcodes (first byte 0x0f).
561 *
562 * @{
563 */
564
565/** Opcode 0x0f 0x00 /0. */
566FNIEMOPRM_DEF(iemOp_Grp6_sldt)
567{
568 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
569 IEMOP_HLP_MIN_286();
570 IEMOP_HLP_NO_REAL_OR_V86_MODE();
571
572 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
573 {
574 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
575 switch (pVCpu->iem.s.enmEffOpSize)
576 {
577 case IEMMODE_16BIT:
578 IEM_MC_BEGIN(0, 1);
579 IEM_MC_LOCAL(uint16_t, u16Ldtr);
580 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
581 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr);
582 IEM_MC_ADVANCE_RIP();
583 IEM_MC_END();
584 break;
585
586 case IEMMODE_32BIT:
587 IEM_MC_BEGIN(0, 1);
588 IEM_MC_LOCAL(uint32_t, u32Ldtr);
589 IEM_MC_FETCH_LDTR_U32(u32Ldtr);
590 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr);
591 IEM_MC_ADVANCE_RIP();
592 IEM_MC_END();
593 break;
594
595 case IEMMODE_64BIT:
596 IEM_MC_BEGIN(0, 1);
597 IEM_MC_LOCAL(uint64_t, u64Ldtr);
598 IEM_MC_FETCH_LDTR_U64(u64Ldtr);
599 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr);
600 IEM_MC_ADVANCE_RIP();
601 IEM_MC_END();
602 break;
603
604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
605 }
606 }
607 else
608 {
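 /* Memory form: always a 16-bit store of the selector, regardless of
    the effective operand size. */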
609 IEM_MC_BEGIN(0, 2);
610 IEM_MC_LOCAL(uint16_t, u16Ldtr);
611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
613 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
614 IEM_MC_FETCH_LDTR_U16(u16Ldtr);
615 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr);
616 IEM_MC_ADVANCE_RIP();
617 IEM_MC_END();
618 }
619 return VINF_SUCCESS;
620}
621
622
623/** Opcode 0x0f 0x00 /1. */
624FNIEMOPRM_DEF(iemOp_Grp6_str)
625{
626 IEMOP_MNEMONIC(str, "str Rv/Mw");
627 IEMOP_HLP_MIN_286();
628 IEMOP_HLP_NO_REAL_OR_V86_MODE();
629
630 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
631 {
632 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
633 switch (pVCpu->iem.s.enmEffOpSize)
634 {
635 case IEMMODE_16BIT:
636 IEM_MC_BEGIN(0, 1);
637 IEM_MC_LOCAL(uint16_t, u16Tr);
638 IEM_MC_FETCH_TR_U16(u16Tr);
639 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr);
640 IEM_MC_ADVANCE_RIP();
641 IEM_MC_END();
642 break;
643
644 case IEMMODE_32BIT:
645 IEM_MC_BEGIN(0, 1);
646 IEM_MC_LOCAL(uint32_t, u32Tr);
647 IEM_MC_FETCH_TR_U32(u32Tr);
648 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr);
649 IEM_MC_ADVANCE_RIP();
650 IEM_MC_END();
651 break;
652
653 case IEMMODE_64BIT:
654 IEM_MC_BEGIN(0, 1);
655 IEM_MC_LOCAL(uint64_t, u64Tr);
656 IEM_MC_FETCH_TR_U64(u64Tr);
657 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr);
658 IEM_MC_ADVANCE_RIP();
659 IEM_MC_END();
660 break;
661
662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
663 }
664 }
665 else
666 {
667 IEM_MC_BEGIN(0, 2);
668 IEM_MC_LOCAL(uint16_t, u16Tr);
669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
671 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
672 IEM_MC_FETCH_TR_U16(u16Tr);
673 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr);
674 IEM_MC_ADVANCE_RIP();
675 IEM_MC_END();
676 }
677 return VINF_SUCCESS;
678}
679
680
681/** Opcode 0x0f 0x00 /2. */
682FNIEMOPRM_DEF(iemOp_Grp6_lldt)
683{
684 IEMOP_MNEMONIC(lldt, "lldt Ew");
685 IEMOP_HLP_MIN_286();
686 IEMOP_HLP_NO_REAL_OR_V86_MODE();
687
688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
689 {
690 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
691 IEM_MC_BEGIN(1, 0);
692 IEM_MC_ARG(uint16_t, u16Sel, 0);
693 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
694 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
695 IEM_MC_END();
696 }
697 else
698 {
699 IEM_MC_BEGIN(1, 1);
700 IEM_MC_ARG(uint16_t, u16Sel, 0);
701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
703 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
704 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
705 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
706 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
707 IEM_MC_END();
708 }
709 return VINF_SUCCESS;
710}
711
712
713/** Opcode 0x0f 0x00 /3. */
714FNIEMOPRM_DEF(iemOp_Grp6_ltr)
715{
716 IEMOP_MNEMONIC(ltr, "ltr Ew");
717 IEMOP_HLP_MIN_286();
718 IEMOP_HLP_NO_REAL_OR_V86_MODE();
719
720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
721 {
722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
723 IEM_MC_BEGIN(1, 0);
724 IEM_MC_ARG(uint16_t, u16Sel, 0);
725 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
726 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
727 IEM_MC_END();
728 }
729 else
730 {
731 IEM_MC_BEGIN(1, 1);
732 IEM_MC_ARG(uint16_t, u16Sel, 0);
733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
736 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
737 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
738 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
739 IEM_MC_END();
740 }
741 return VINF_SUCCESS;
742}
743
744
745/** Common worker for VERR and VERW, opcode 0x0f 0x00 /4 and /5. */
746FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
747{
748 IEMOP_HLP_MIN_286();
749 IEMOP_HLP_NO_REAL_OR_V86_MODE();
750
751 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
752 {
753 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
754 IEM_MC_BEGIN(2, 0);
755 IEM_MC_ARG(uint16_t, u16Sel, 0);
756 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
757 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
758 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
759 IEM_MC_END();
760 }
761 else
762 {
763 IEM_MC_BEGIN(2, 1);
764 IEM_MC_ARG(uint16_t, u16Sel, 0);
765 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
768 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
769 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
770 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
771 IEM_MC_END();
772 }
773 return VINF_SUCCESS;
774}
775
776
777/** Opcode 0x0f 0x00 /4. */
778FNIEMOPRM_DEF(iemOp_Grp6_verr)
779{
780 IEMOP_MNEMONIC(verr, "verr Ew");
781 IEMOP_HLP_MIN_286();
782 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
783}
784
785
786/** Opcode 0x0f 0x00 /5. */
787FNIEMOPRM_DEF(iemOp_Grp6_verw)
788{
789 IEMOP_MNEMONIC(verw, "verw Ew");
790 IEMOP_HLP_MIN_286();
791 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
792}
793
794
795/**
796 * Group 6 jump table.
797 */
798IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
799{
800 iemOp_Grp6_sldt,
801 iemOp_Grp6_str,
802 iemOp_Grp6_lldt,
803 iemOp_Grp6_ltr,
804 iemOp_Grp6_verr,
805 iemOp_Grp6_verw,
806 iemOp_InvalidWithRM,
807 iemOp_InvalidWithRM
808};
809
810/** Opcode 0x0f 0x00. */
811FNIEMOP_DEF(iemOp_Grp6)
812{
813 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
814 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm);
815}
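
/* Worked decode example (assumed bytes, for illustration): 0F 00 D8 has
   ModRM 0xD8 = mod 3, reg 3, rm 0, so the reg field indexes
   g_apfnGroup6[3] = iemOp_Grp6_ltr and LTR AX is emulated. */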
816
817
818/** Opcode 0x0f 0x01 /0. */
819FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
820{
821 IEMOP_MNEMONIC(sgdt, "sgdt Ms");
822 IEMOP_HLP_MIN_286();
823 IEMOP_HLP_64BIT_OP_SIZE();
824 IEM_MC_BEGIN(2, 1);
825 IEM_MC_ARG(uint8_t, iEffSeg, 0);
826 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
829 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
830 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
831 IEM_MC_END();
832 return VINF_SUCCESS;
833}
834
835
836/** Opcode 0x0f 0x01 /0. */
837FNIEMOP_DEF(iemOp_Grp7_vmcall)
838{
839 IEMOP_BITCH_ABOUT_STUB();
840 return IEMOP_RAISE_INVALID_OPCODE();
841}
842
843
844/** Opcode 0x0f 0x01 /0. */
845FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
846{
847 IEMOP_BITCH_ABOUT_STUB();
848 return IEMOP_RAISE_INVALID_OPCODE();
849}
850
851
852/** Opcode 0x0f 0x01 /0. */
853FNIEMOP_DEF(iemOp_Grp7_vmresume)
854{
855 IEMOP_BITCH_ABOUT_STUB();
856 return IEMOP_RAISE_INVALID_OPCODE();
857}
858
859
860/** Opcode 0x0f 0x01 /0. */
861FNIEMOP_DEF(iemOp_Grp7_vmxoff)
862{
863 IEMOP_BITCH_ABOUT_STUB();
864 return IEMOP_RAISE_INVALID_OPCODE();
865}
866
867
868/** Opcode 0x0f 0x01 /1. */
869FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
870{
871 IEMOP_MNEMONIC(sidt, "sidt Ms");
872 IEMOP_HLP_MIN_286();
873 IEMOP_HLP_64BIT_OP_SIZE();
874 IEM_MC_BEGIN(2, 1);
875 IEM_MC_ARG(uint8_t, iEffSeg, 0);
876 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
879 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
880 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
881 IEM_MC_END();
882 return VINF_SUCCESS;
883}
884
885
886/** Opcode 0x0f 0x01 /1. */
887FNIEMOP_DEF(iemOp_Grp7_monitor)
888{
889 IEMOP_MNEMONIC(monitor, "monitor");
890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
891 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
892}
893
894
895/** Opcode 0x0f 0x01 /1. */
896FNIEMOP_DEF(iemOp_Grp7_mwait)
897{
898 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
900 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
901}
902
903
904/** Opcode 0x0f 0x01 /2. */
905FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
906{
907 IEMOP_MNEMONIC(lgdt, "lgdt");
908 IEMOP_HLP_64BIT_OP_SIZE();
909 IEM_MC_BEGIN(3, 1);
910 IEM_MC_ARG(uint8_t, iEffSeg, 0);
911 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
912 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
915 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
916 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
917 IEM_MC_END();
918 return VINF_SUCCESS;
919}
920
921
922/** Opcode 0x0f 0x01 0xd0. */
923FNIEMOP_DEF(iemOp_Grp7_xgetbv)
924{
925 IEMOP_MNEMONIC(xgetbv, "xgetbv");
926 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
927 {
928 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
929 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
930 }
931 return IEMOP_RAISE_INVALID_OPCODE();
932}
933
934
935/** Opcode 0x0f 0x01 0xd1. */
936FNIEMOP_DEF(iemOp_Grp7_xsetbv)
937{
938 IEMOP_MNEMONIC(xsetbv, "xsetbv");
939 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
940 {
941 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
942 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
943 }
944 return IEMOP_RAISE_INVALID_OPCODE();
945}
946
947
948/** Opcode 0x0f 0x01 /3. */
949FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
950{
951 IEMOP_MNEMONIC(lidt, "lidt");
952 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
953 ? IEMMODE_64BIT
954 : pVCpu->iem.s.enmEffOpSize;
955 IEM_MC_BEGIN(3, 1);
956 IEM_MC_ARG(uint8_t, iEffSeg, 0);
957 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
958 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
961 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
962 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
963 IEM_MC_END();
964 return VINF_SUCCESS;
965}
966
967
968/** Opcode 0x0f 0x01 0xd8. */
969FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
970
971/** Opcode 0x0f 0x01 0xd9. */
972FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);
973
974/** Opcode 0x0f 0x01 0xda. */
975FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
976
977/** Opcode 0x0f 0x01 0xdb. */
978FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
979
980/** Opcode 0x0f 0x01 0xdc. */
981FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
982
983/** Opcode 0x0f 0x01 0xdd. */
984FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
985
986/** Opcode 0x0f 0x01 0xde. */
987FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
988
989/** Opcode 0x0f 0x01 0xdf. */
990FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
991
992/** Opcode 0x0f 0x01 /4. */
993FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
994{
995 IEMOP_MNEMONIC(smsw, "smsw");
996 IEMOP_HLP_MIN_286();
997 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
998 {
999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1000 switch (pVCpu->iem.s.enmEffOpSize)
1001 {
1002 case IEMMODE_16BIT:
1003 IEM_MC_BEGIN(0, 1);
1004 IEM_MC_LOCAL(uint16_t, u16Tmp);
1005 IEM_MC_FETCH_CR0_U16(u16Tmp);
1006 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1007 { /* likely */ }
1008 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386)
1009 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1010 else
1011 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1012 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp);
1013 IEM_MC_ADVANCE_RIP();
1014 IEM_MC_END();
1015 return VINF_SUCCESS;
1016
1017 case IEMMODE_32BIT:
1018 IEM_MC_BEGIN(0, 1);
1019 IEM_MC_LOCAL(uint32_t, u32Tmp);
1020 IEM_MC_FETCH_CR0_U32(u32Tmp);
1021 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
1022 IEM_MC_ADVANCE_RIP();
1023 IEM_MC_END();
1024 return VINF_SUCCESS;
1025
1026 case IEMMODE_64BIT:
1027 IEM_MC_BEGIN(0, 1);
1028 IEM_MC_LOCAL(uint64_t, u64Tmp);
1029 IEM_MC_FETCH_CR0_U64(u64Tmp);
1030 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
1031 IEM_MC_ADVANCE_RIP();
1032 IEM_MC_END();
1033 return VINF_SUCCESS;
1034
1035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1036 }
1037 }
1038 else
1039 {
1040 /* Ignore operand size here, memory refs are always 16-bit. */
1041 IEM_MC_BEGIN(0, 2);
1042 IEM_MC_LOCAL(uint16_t, u16Tmp);
1043 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1046 IEM_MC_FETCH_CR0_U16(u16Tmp);
1047 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386)
1048 { /* likely */ }
1049 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386)
1050 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0);
1051 else
1052 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0);
1053 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
1054 IEM_MC_ADVANCE_RIP();
1055 IEM_MC_END();
1056 return VINF_SUCCESS;
1057 }
1058}
1059
1060
1061/** Opcode 0x0f 0x01 /6. */
1062FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1063{
1064 /* The operand size is effectively ignored, all is 16-bit and only the
1065 lower 4 bits (CR0.PE/MP/EM/TS) are used. */
1066 IEMOP_MNEMONIC(lmsw, "lmsw");
1067 IEMOP_HLP_MIN_286();
1068 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1069 {
1070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1071 IEM_MC_BEGIN(1, 0);
1072 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1073 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1074 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1075 IEM_MC_END();
1076 }
1077 else
1078 {
1079 IEM_MC_BEGIN(1, 1);
1080 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1084 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1085 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
1086 IEM_MC_END();
1087 }
1088 return VINF_SUCCESS;
1089}
1090
1091
1092/** Opcode 0x0f 0x01 /7. */
1093FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1094{
1095 IEMOP_MNEMONIC(invlpg, "invlpg");
1096 IEMOP_HLP_MIN_486();
1097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1098 IEM_MC_BEGIN(1, 1);
1099 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1101 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
1102 IEM_MC_END();
1103 return VINF_SUCCESS;
1104}
1105
1106
1107/** Opcode 0x0f 0x01 /7. */
1108FNIEMOP_DEF(iemOp_Grp7_swapgs)
1109{
1110 IEMOP_MNEMONIC(swapgs, "swapgs");
1111 IEMOP_HLP_ONLY_64BIT();
1112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1113 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
1114}
1115
1116
1117/** Opcode 0x0f 0x01 /7. */
1118FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1119{
1120 NOREF(pVCpu);
1121 IEMOP_BITCH_ABOUT_STUB();
1122 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1123}
1124
1125
1126/** Opcode 0x0f 0x01. */
1127FNIEMOP_DEF(iemOp_Grp7)
1128{
1129 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1130 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1131 {
1132 case 0:
1133 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1134 return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
1135 switch (bRm & X86_MODRM_RM_MASK)
1136 {
1137 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1138 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1139 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1140 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1141 }
1142 return IEMOP_RAISE_INVALID_OPCODE();
1143
1144 case 1:
1145 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1146 return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
1147 switch (bRm & X86_MODRM_RM_MASK)
1148 {
1149 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1150 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1151 }
1152 return IEMOP_RAISE_INVALID_OPCODE();
1153
1154 case 2:
1155 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1156 return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
1157 switch (bRm & X86_MODRM_RM_MASK)
1158 {
1159 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1160 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1161 }
1162 return IEMOP_RAISE_INVALID_OPCODE();
1163
1164 case 3:
1165 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1166 return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
1167 switch (bRm & X86_MODRM_RM_MASK)
1168 {
1169 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1170 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1171 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1172 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1173 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1174 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1175 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1176 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1178 }
1179
1180 case 4:
1181 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1182
1183 case 5:
1184 return IEMOP_RAISE_INVALID_OPCODE();
1185
1186 case 6:
1187 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1188
1189 case 7:
1190 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1191 return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
1192 switch (bRm & X86_MODRM_RM_MASK)
1193 {
1194 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1195 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1196 }
1197 return IEMOP_RAISE_INVALID_OPCODE();
1198
1199 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1200 }
1201}
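
/* Worked decode example (assumed bytes, for illustration): 0F 01 F8 has
   ModRM 0xF8 = mod 3, reg 7, rm 0, which lands in case 7 above and
   dispatches to iemOp_Grp7_swapgs. */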
1202
1203/** Common worker for LAR and LSL, opcodes 0x0f 0x02 and 0x0f 0x03. */
1204FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1205{
1206 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1208
1209 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1210 {
1211 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1212 switch (pVCpu->iem.s.enmEffOpSize)
1213 {
1214 case IEMMODE_16BIT:
1215 {
1216 IEM_MC_BEGIN(3, 0);
1217 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1218 IEM_MC_ARG(uint16_t, u16Sel, 1);
1219 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1220
1221 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1222 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1223 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1224
1225 IEM_MC_END();
1226 return VINF_SUCCESS;
1227 }
1228
1229 case IEMMODE_32BIT:
1230 case IEMMODE_64BIT:
1231 {
1232 IEM_MC_BEGIN(3, 0);
1233 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1234 IEM_MC_ARG(uint16_t, u16Sel, 1);
1235 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1236
1237 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1238 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1239 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1240
1241 IEM_MC_END();
1242 return VINF_SUCCESS;
1243 }
1244
1245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1246 }
1247 }
1248 else
1249 {
1250 switch (pVCpu->iem.s.enmEffOpSize)
1251 {
1252 case IEMMODE_16BIT:
1253 {
1254 IEM_MC_BEGIN(3, 1);
1255 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1256 IEM_MC_ARG(uint16_t, u16Sel, 1);
1257 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1259
1260 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1261 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1262
1263 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1264 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1265 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1266
1267 IEM_MC_END();
1268 return VINF_SUCCESS;
1269 }
1270
1271 case IEMMODE_32BIT:
1272 case IEMMODE_64BIT:
1273 {
1274 IEM_MC_BEGIN(3, 1);
1275 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1276 IEM_MC_ARG(uint16_t, u16Sel, 1);
1277 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1279
1280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1281 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1282/** @todo testcase: make sure it's a 16-bit read. */
1283
1284 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1285 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1286 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1287
1288 IEM_MC_END();
1289 return VINF_SUCCESS;
1290 }
1291
1292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1293 }
1294 }
1295}
1296
1297
1298
1299/** Opcode 0x0f 0x02. */
1300FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1301{
1302 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1303 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1304}
1305
1306
1307/** Opcode 0x0f 0x03. */
1308FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1309{
1310 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1311 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1312}
1313
1314
1315/** Opcode 0x0f 0x05. */
1316FNIEMOP_DEF(iemOp_syscall)
1317{
1318 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1320 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
1321}
1322
1323
1324/** Opcode 0x0f 0x06. */
1325FNIEMOP_DEF(iemOp_clts)
1326{
1327 IEMOP_MNEMONIC(clts, "clts");
1328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1329 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
1330}
1331
1332
1333/** Opcode 0x0f 0x07. */
1334FNIEMOP_DEF(iemOp_sysret)
1335{
1336 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1338 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
1339}
1340
1341
1342/** Opcode 0x0f 0x08. */
1343FNIEMOP_STUB(iemOp_invd);
1344// IEMOP_HLP_MIN_486();
1345
1346
1347/** Opcode 0x0f 0x09. */
1348FNIEMOP_DEF(iemOp_wbinvd)
1349{
1350 IEMOP_MNEMONIC(wbinvd, "wbinvd");
1351 IEMOP_HLP_MIN_486();
1352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1353 IEM_MC_BEGIN(0, 0);
1354 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
1355 IEM_MC_ADVANCE_RIP();
1356 IEM_MC_END();
1357 return VINF_SUCCESS; /* ignore for now */
1358}
1359
1360
1361/** Opcode 0x0f 0x0b. */
1362FNIEMOP_DEF(iemOp_ud2)
1363{
1364 IEMOP_MNEMONIC(ud2, "ud2");
1365 return IEMOP_RAISE_INVALID_OPCODE();
1366}
1367
1368/** Opcode 0x0f 0x0d. */
1369FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1370{
1371 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1372 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
1373 {
1374 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
1375 return IEMOP_RAISE_INVALID_OPCODE();
1376 }
1377
1378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1379 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1380 {
1381 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
1382 return IEMOP_RAISE_INVALID_OPCODE();
1383 }
1384
1385 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1386 {
1387 case 2: /* Aliased to /0 for the time being. */
1388 case 4: /* Aliased to /0 for the time being. */
1389 case 5: /* Aliased to /0 for the time being. */
1390 case 6: /* Aliased to /0 for the time being. */
1391 case 7: /* Aliased to /0 for the time being. */
1392 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
1393 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
1394 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
1395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1396 }
1397
1398 IEM_MC_BEGIN(0, 1);
1399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1402 /* Currently a NOP. */
1403 NOREF(GCPtrEffSrc);
1404 IEM_MC_ADVANCE_RIP();
1405 IEM_MC_END();
1406 return VINF_SUCCESS;
1407}
1408
1409
1410/** Opcode 0x0f 0x0e. */
1411FNIEMOP_STUB(iemOp_femms);
1412
1413
1414/** Opcode 0x0f 0x0f 0x0c. */
1415FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
1416
1417/** Opcode 0x0f 0x0f 0x0d. */
1418FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
1419
1420/** Opcode 0x0f 0x0f 0x1c. */
1421FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
1422
1423/** Opcode 0x0f 0x0f 0x1d. */
1424FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
1425
1426/** Opcode 0x0f 0x0f 0x8a. */
1427FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
1428
1429/** Opcode 0x0f 0x0f 0x8e. */
1430FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
1431
1432/** Opcode 0x0f 0x0f 0x90. */
1433FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
1434
1435/** Opcode 0x0f 0x0f 0x94. */
1436FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
1437
1438/** Opcode 0x0f 0x0f 0x96. */
1439FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
1440
1441/** Opcode 0x0f 0x0f 0x97. */
1442FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1443
1444/** Opcode 0x0f 0x0f 0x9a. */
1445FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
1446
1447/** Opcode 0x0f 0x0f 0x9e. */
1448FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
1449
1450/** Opcode 0x0f 0x0f 0xa0. */
1451FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1452
1453/** Opcode 0x0f 0x0f 0xa4. */
1454FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
1455
1456/** Opcode 0x0f 0x0f 0xa6. */
1457FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1458
1459/** Opcode 0x0f 0x0f 0xa7. */
1460FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1461
1462/** Opcode 0x0f 0x0f 0xaa. */
1463FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1464
1465/** Opcode 0x0f 0x0f 0xae. */
1466FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1467
1468/** Opcode 0x0f 0x0f 0xb0. */
1469FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1470
1471/** Opcode 0x0f 0x0f 0xb4. */
1472FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1473
1474/** Opcode 0x0f 0x0f 0xb6. */
1475FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1476
1477/** Opcode 0x0f 0x0f 0xb7. */
1478FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1479
1480/** Opcode 0x0f 0x0f 0xbb. */
1481FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1482
1483/** Opcode 0x0f 0x0f 0xbf. */
1484FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1485
1486
1487/** Opcode 0x0f 0x0f. */
1488FNIEMOP_DEF(iemOp_3Dnow)
1489{
1490 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
1491 {
1492 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
1493 return IEMOP_RAISE_INVALID_OPCODE();
1494 }
1495
1496 /* This is pretty sparse, use switch instead of table. */
1497 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1498 switch (b)
1499 {
1500 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
1501 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
1502 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
1503 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
1504 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
1505 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
1506 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
1507 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
1508 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
1509 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1510 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
1511 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
1512 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1513 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
1514 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1515 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1516 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
1517 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1518 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1519 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1520 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1521 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1522 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1523 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1524 default:
1525 return IEMOP_RAISE_INVALID_OPCODE();
1526 }
1527}
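
/* The 3DNow! operation is selected by a dedicated opcode byte rather than
   by the ModRM reg field, and only a couple of dozen values are defined,
   which is why a sparse switch beats a 256-entry table here. */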
1528
1529
1530/** Opcode 0x0f 0x10. */
1531FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
1532
1533
1534/** Opcode 0x0f 0x11. */
1535FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
1536{
1537 /* Quick hack. Need to restructure all of this later some time. */
1538 uint32_t const fRelevantPrefix = pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
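 /* Operand-size and repeat prefixes select the SSE form: none = MOVUPS,
    0x66 = MOVUPD, 0xF3 = MOVSS, 0xF2 = MOVSD. Only the no-prefix and
    0xF2 forms are handled below; the rest hit the stub path. */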
1539 if (fRelevantPrefix == 0)
1540 {
1541 IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
1542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1543 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1544 {
1545 /*
1546 * Register, register.
1547 */
1548 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1549 IEM_MC_BEGIN(0, 0);
1550 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1551 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1552 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1553 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1554 IEM_MC_ADVANCE_RIP();
1555 IEM_MC_END();
1556 }
1557 else
1558 {
1559 /*
1560 * Memory, register.
1561 */
1562 IEM_MC_BEGIN(0, 2);
1563 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1565
1566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1567 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1568 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1569 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1570
1571 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1572 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1573
1574 IEM_MC_ADVANCE_RIP();
1575 IEM_MC_END();
1576 }
1577 }
1578 else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
1579 {
1580 IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
1581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1582 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1583 {
1584 /*
1585 * Register, register.
1586 */
1587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1588 IEM_MC_BEGIN(0, 1);
1589 IEM_MC_LOCAL(uint64_t, uSrc);
1590
1591 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1592 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1593 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1594 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1595
1596 IEM_MC_ADVANCE_RIP();
1597 IEM_MC_END();
1598 }
1599 else
1600 {
1601 /*
1602 * Memory, register.
1603 */
1604 IEM_MC_BEGIN(0, 2);
1605 IEM_MC_LOCAL(uint64_t, uSrc);
1606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1607
1608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1610 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1611 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1612
1613 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1614 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1615
1616 IEM_MC_ADVANCE_RIP();
1617 IEM_MC_END();
1618 }
1619 }
1620 else
1621 {
1622 IEMOP_BITCH_ABOUT_STUB();
1623 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1624 }
1625 return VINF_SUCCESS;
1626}
1627
1628
1629/** Opcode 0x0f 0x12. */
1630FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
1631
1632
1633/** Opcode 0x0f 0x13. */
1634FNIEMOP_DEF(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq)
1635{
1636 /* Quick hack. Need to restructure all of this later some time. */
1637 if (pVCpu->iem.s.fPrefixes == IEM_OP_PRF_SIZE_OP)
1638 {
1639 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
1640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1641 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1642 {
1643#if 0
1644 /*
1645 * Register, register.
1646 */
1647 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1648 IEM_MC_BEGIN(0, 1);
1649 IEM_MC_LOCAL(uint64_t, uSrc);
1650 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1651 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1652 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1653 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
1654 IEM_MC_ADVANCE_RIP();
1655 IEM_MC_END();
1656#else
1657 return IEMOP_RAISE_INVALID_OPCODE();
1658#endif
1659 }
1660 else
1661 {
1662 /*
1663 * Memory, register.
1664 */
1665 IEM_MC_BEGIN(0, 2);
1666 IEM_MC_LOCAL(uint64_t, uSrc);
1667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1668
1669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1670 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
1671 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1672 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1673
1674 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1675 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1676
1677 IEM_MC_ADVANCE_RIP();
1678 IEM_MC_END();
1679 }
1680 return VINF_SUCCESS;
1681 }
1682
1683 IEMOP_BITCH_ABOUT_STUB();
1684 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1685}
1686
1687
1688/** Opcode 0x0f 0x14. */
1689FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
1690/** Opcode 0x0f 0x15. */
1691FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
1692/** Opcode 0x0f 0x16. */
1693FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
1694/** Opcode 0x0f 0x17. */
1695FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1696
1697
1698/** Opcode 0x0f 0x18. */
1699FNIEMOP_DEF(iemOp_prefetch_Grp16)
1700{
1701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1702 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1703 {
1704 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1705 {
1706 case 4: /* Aliased to /0 for the time being according to AMD. */
1707 case 5: /* Aliased to /0 for the time being according to AMD. */
1708 case 6: /* Aliased to /0 for the time being according to AMD. */
1709 case 7: /* Aliased to /0 for the time being according to AMD. */
1710 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
1711 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
1712 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
1713 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
1714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1715 }
1716
1717 IEM_MC_BEGIN(0, 1);
1718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1719 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1721 /* Currently a NOP. */
1722 NOREF(GCPtrEffSrc);
1723 IEM_MC_ADVANCE_RIP();
1724 IEM_MC_END();
1725 return VINF_SUCCESS;
1726 }
1727
1728 return IEMOP_RAISE_INVALID_OPCODE();
1729}
1730
1731
1732/** Opcode 0x0f 0x19..0x1f. */
1733FNIEMOP_DEF(iemOp_nop_Ev)
1734{
1735 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
1736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1737 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1738 {
1739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1740 IEM_MC_BEGIN(0, 0);
1741 IEM_MC_ADVANCE_RIP();
1742 IEM_MC_END();
1743 }
1744 else
1745 {
1746 IEM_MC_BEGIN(0, 1);
1747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1750 /* Currently a NOP. */
1751 NOREF(GCPtrEffSrc);
1752 IEM_MC_ADVANCE_RIP();
1753 IEM_MC_END();
1754 }
1755 return VINF_SUCCESS;
1756}
1757
1758
1759/** Opcode 0x0f 0x20. */
1760FNIEMOP_DEF(iemOp_mov_Rd_Cd)
1761{
1762 /* mod is ignored, as are operand size overrides. */
1763 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
1764 IEMOP_HLP_MIN_386();
1765 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1766 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1767 else
1768 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1769
1770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1771 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1772 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1773 {
1774 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1775 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1776 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1777 iCrReg |= 8;
1778 }
1779 switch (iCrReg)
1780 {
1781 case 0: case 2: case 3: case 4: case 8:
1782 break;
1783 default:
1784 return IEMOP_RAISE_INVALID_OPCODE();
1785 }
1786 IEMOP_HLP_DONE_DECODING();
1787
1788 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg);
1789}
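
/* The LOCK-prefix special case above implements the AMD alternative CR8
   encoding: on CPUs reporting the feature, e.g. F0 0F 20 C0 is read as
   MOV EAX,CR8 instead of MOV EAX,CR0 (assumed example bytes). */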
1790
1791
1792/** Opcode 0x0f 0x21. */
1793FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1794{
1795 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
1796 IEMOP_HLP_MIN_386();
1797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1799 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1800 return IEMOP_RAISE_INVALID_OPCODE();
1801 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1802 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB,
1803 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1804}
1805
1806
1807/** Opcode 0x0f 0x22. */
1808FNIEMOP_DEF(iemOp_mov_Cd_Rd)
1809{
1810 /* mod is ignored, as are operand size overrides. */
1811 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
1812 IEMOP_HLP_MIN_386();
1813 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1814 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1815 else
1816 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
1817
1818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1819 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
1820 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
1821 {
1822 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
1823 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
1824 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
1825 iCrReg |= 8;
1826 }
1827 switch (iCrReg)
1828 {
1829 case 0: case 2: case 3: case 4: case 8:
1830 break;
1831 default:
1832 return IEMOP_RAISE_INVALID_OPCODE();
1833 }
1834 IEMOP_HLP_DONE_DECODING();
1835
1836 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1837}
1838
1839
1840/** Opcode 0x0f 0x23. */
1841FNIEMOP_DEF(iemOp_mov_Dd_Rd)
1842{
1843 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
1844 IEMOP_HLP_MIN_386();
1845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1847 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
1848 return IEMOP_RAISE_INVALID_OPCODE();
1849 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
1850 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
1851 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB);
1852}
1853
1854
1855/** Opcode 0x0f 0x24. */
1856FNIEMOP_DEF(iemOp_mov_Rd_Td)
1857{
1858 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
1859 /** @todo works on 386 and 486. */
1860 /* The RM byte is not considered, see testcase. */
1861 return IEMOP_RAISE_INVALID_OPCODE();
1862}
1863
1864
1865/** Opcode 0x0f 0x26. */
1866FNIEMOP_DEF(iemOp_mov_Td_Rd)
1867{
1868 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
1869 /** @todo works on 386 and 486. */
1870 /* The RM byte is not considered, see testcase. */
1871 return IEMOP_RAISE_INVALID_OPCODE();
1872}
1873
1874
1875/** Opcode 0x0f 0x28. */
1876FNIEMOP_DEF(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd)
1877{
1878 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1879 IEMOP_MNEMONIC(movaps_Vps_Wps, "movaps Vps,Wps");
1880 else
1881 IEMOP_MNEMONIC(movapd_Vpd_Wpd, "movapd Vpd,Wpd");
1882 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1883 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1884 {
1885 /*
1886 * Register, register.
1887 */
1888 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1889 IEM_MC_BEGIN(0, 0);
1890 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1891 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1892 else
1893 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1894 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1895 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
1896 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1897 IEM_MC_ADVANCE_RIP();
1898 IEM_MC_END();
1899 }
1900 else
1901 {
1902 /*
1903 * Register, memory.
1904 */
1905 IEM_MC_BEGIN(0, 2);
1906 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1908
1909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1910 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1911 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1912 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1913 else
1914 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1915 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1916
1917 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1918 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc);
1919
1920 IEM_MC_ADVANCE_RIP();
1921 IEM_MC_END();
1922 }
1923 return VINF_SUCCESS;
1924}
1925
1926
1927/** Opcode 0x0f 0x29. */
1928FNIEMOP_DEF(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd)
1929{
1930 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1931 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
1932 else
1933 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
1934 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1935 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1936 {
1937 /*
1938 * Register, register.
1939 */
1940 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
1941 IEM_MC_BEGIN(0, 0);
1942 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1943 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1944 else
1945 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1946 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
1947 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
1948 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1949 IEM_MC_ADVANCE_RIP();
1950 IEM_MC_END();
1951 }
1952 else
1953 {
1954 /*
1955 * Memory, register.
1956 */
1957 IEM_MC_BEGIN(0, 2);
1958 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1959 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1960
1961 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1962 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
1963 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1964 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1965 else
1966 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1967 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
1968
1969 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1970 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1971
1972 IEM_MC_ADVANCE_RIP();
1973 IEM_MC_END();
1974 }
1975 return VINF_SUCCESS;
1976}
1977
1978
1979/** Opcode 0x0f 0x2a. */
1980FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
1981
1982
1983/** Opcode 0x0f 0x2b. */
1984FNIEMOP_DEF(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd)
1985{
1986 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
1987 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
1988 else
1989 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mpd,Vpd");
1990 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1991 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1992 {
1993 /*
1994 * memory, register.
1995 */
1996 IEM_MC_BEGIN(0, 2);
1997 IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
1998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1999
2000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2001 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ */
2002 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP))
2003 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2004 else
2005 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2007
2008 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2009 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2010
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 }
2014 /* The register, register encoding is invalid. */
2015 else
2016 return IEMOP_RAISE_INVALID_OPCODE();
2017 return VINF_SUCCESS;
2018}
2019
2020
2021/** Opcode 0x0f 0x2c. */
2022FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
2023/** Opcode 0x0f 0x2d. */
2024FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
2025/** Opcode 0x0f 0x2e. */
2026FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
2027/** Opcode 0x0f 0x2f. */
2028FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
2029
2030
2031/** Opcode 0x0f 0x30. */
2032FNIEMOP_DEF(iemOp_wrmsr)
2033{
2034 IEMOP_MNEMONIC(wrmsr, "wrmsr");
2035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2036 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
2037}
2038
2039
2040/** Opcode 0x0f 0x31. */
2041FNIEMOP_DEF(iemOp_rdtsc)
2042{
2043 IEMOP_MNEMONIC(rdtsc, "rdtsc");
2044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2045 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
2046}
2047
2048
2049/** Opcode 0x0f 0x32. */
2050FNIEMOP_DEF(iemOp_rdmsr)
2051{
2052 IEMOP_MNEMONIC(rdmsr, "rdmsr");
2053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2054 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
2055}
2056
2057
2058/** Opcode 0x0f 0x33. */
2059FNIEMOP_STUB(iemOp_rdpmc);
2060/** Opcode 0x0f 0x34. */
2061FNIEMOP_STUB(iemOp_sysenter);
2062/** Opcode 0x0f 0x35. */
2063FNIEMOP_STUB(iemOp_sysexit);
2064/** Opcode 0x0f 0x37. */
2065FNIEMOP_STUB(iemOp_getsec);
2066/** Opcode 0x0f 0x38. */
2067FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
2068/** Opcode 0x0f 0x3a. */
2069FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
2070
2071
2072/**
2073 * Implements a conditional move.
2074 *
2075 * Wish there was an obvious way to do this where we could share code and
2076 * reduce bloat.
2077 *
2078 * @param a_Cnd The conditional "microcode" operation.
2079 */
2080#define CMOV_X(a_Cnd) \
2081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2082 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
2083 { \
2084 switch (pVCpu->iem.s.enmEffOpSize) \
2085 { \
2086 case IEMMODE_16BIT: \
2087 IEM_MC_BEGIN(0, 1); \
2088 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2089 a_Cnd { \
2090 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2091 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2092 } IEM_MC_ENDIF(); \
2093 IEM_MC_ADVANCE_RIP(); \
2094 IEM_MC_END(); \
2095 return VINF_SUCCESS; \
2096 \
2097 case IEMMODE_32BIT: \
2098 IEM_MC_BEGIN(0, 1); \
2099 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2100 a_Cnd { \
2101 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2102 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2103 } IEM_MC_ELSE() { \
2104 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2105 } IEM_MC_ENDIF(); \
2106 IEM_MC_ADVANCE_RIP(); \
2107 IEM_MC_END(); \
2108 return VINF_SUCCESS; \
2109 \
2110 case IEMMODE_64BIT: \
2111 IEM_MC_BEGIN(0, 1); \
2112 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2113 a_Cnd { \
2114 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \
2115 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2116 } IEM_MC_ENDIF(); \
2117 IEM_MC_ADVANCE_RIP(); \
2118 IEM_MC_END(); \
2119 return VINF_SUCCESS; \
2120 \
2121 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2122 } \
2123 } \
2124 else \
2125 { \
2126 switch (pVCpu->iem.s.enmEffOpSize) \
2127 { \
2128 case IEMMODE_16BIT: \
2129 IEM_MC_BEGIN(0, 2); \
2130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2131 IEM_MC_LOCAL(uint16_t, u16Tmp); \
2132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2133 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2134 a_Cnd { \
2135 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \
2136 } IEM_MC_ENDIF(); \
2137 IEM_MC_ADVANCE_RIP(); \
2138 IEM_MC_END(); \
2139 return VINF_SUCCESS; \
2140 \
2141 case IEMMODE_32BIT: \
2142 IEM_MC_BEGIN(0, 2); \
2143 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2144 IEM_MC_LOCAL(uint32_t, u32Tmp); \
2145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2146 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2147 a_Cnd { \
2148 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \
2149 } IEM_MC_ELSE() { \
2150 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \
2151 } IEM_MC_ENDIF(); \
2152 IEM_MC_ADVANCE_RIP(); \
2153 IEM_MC_END(); \
2154 return VINF_SUCCESS; \
2155 \
2156 case IEMMODE_64BIT: \
2157 IEM_MC_BEGIN(0, 2); \
2158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2159 IEM_MC_LOCAL(uint64_t, u64Tmp); \
2160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2161 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2162 a_Cnd { \
2163 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \
2164 } IEM_MC_ENDIF(); \
2165 IEM_MC_ADVANCE_RIP(); \
2166 IEM_MC_END(); \
2167 return VINF_SUCCESS; \
2168 \
2169 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
2170 } \
2171 } do {} while (0)
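/*
 * CMOV_X usage sketch: each CMOVcc decoder below supplies only the condition,
 * e.g. CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)) for cmove Gv,Ev. Note that
 * the 32-bit cases clear the high half of the destination register
 * (IEM_MC_CLEAR_HIGH_GREG_U64) even when the condition is false, and that the
 * memory forms always fetch the source, both matching real CMOVcc behaviour.
 */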
2172
2173
2174
2175/** Opcode 0x0f 0x40. */
2176FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
2177{
2178 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
2179 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
2180}
2181
2182
2183/** Opcode 0x0f 0x41. */
2184FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
2185{
2186 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
2187 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
2188}
2189
2190
2191/** Opcode 0x0f 0x42. */
2192FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
2193{
2194 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
2195 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
2196}
2197
2198
2199/** Opcode 0x0f 0x43. */
2200FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
2201{
2202 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
2203 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
2204}
2205
2206
2207/** Opcode 0x0f 0x44. */
2208FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
2209{
2210 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
2211 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
2212}
2213
2214
2215/** Opcode 0x0f 0x45. */
2216FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
2217{
2218 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
2219 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
2220}
2221
2222
2223/** Opcode 0x0f 0x46. */
2224FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
2225{
2226 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
2227 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2228}
2229
2230
2231/** Opcode 0x0f 0x47. */
2232FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
2233{
2234 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
2235 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
2236}
2237
2238
2239/** Opcode 0x0f 0x48. */
2240FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
2241{
2242 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
2243 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
2244}
2245
2246
2247/** Opcode 0x0f 0x49. */
2248FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
2249{
2250 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
2251 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
2252}
2253
2254
2255/** Opcode 0x0f 0x4a. */
2256FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
2257{
2258 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
2259 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
2260}
2261
2262
2263/** Opcode 0x0f 0x4b. */
2264FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
2265{
2266 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
2267 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
2268}
2269
2270
2271/** Opcode 0x0f 0x4c. */
2272FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
2273{
2274 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
2275 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
2276}
2277
2278
2279/** Opcode 0x0f 0x4d. */
2280FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
2281{
2282 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
2283 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
2284}
2285
2286
2287/** Opcode 0x0f 0x4e. */
2288FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
2289{
2290 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
2291 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2292}
2293
2294
2295/** Opcode 0x0f 0x4f. */
2296FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
2297{
2298 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
2299 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
2300}
2301
2302#undef CMOV_X
2303
2304/** Opcode 0x0f 0x50. */
2305FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
2306/** Opcode 0x0f 0x51. */
2307FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
2308/** Opcode 0x0f 0x52. */
2309FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
2310/** Opcode 0x0f 0x53. */
2311FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
2312/** Opcode 0x0f 0x54. */
2313FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
2314/** Opcode 0x0f 0x55. */
2315FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
2316/** Opcode 0x0f 0x56. */
2317FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
2318/** Opcode 0x0f 0x57. */
2319FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
2320/** Opcode 0x0f 0x58. */
2321FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
2322/** Opcode 0x0f 0x59. */
2323FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
2324/** Opcode 0x0f 0x5a. */
2325FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
2326/** Opcode 0x0f 0x5b. */
2327FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
2328/** Opcode 0x0f 0x5c. */
2329FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
2330/** Opcode 0x0f 0x5d. */
2331FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
2332/** Opcode 0x0f 0x5e. */
2333FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
2334/** Opcode 0x0f 0x5f. */
2335FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
2336
2337
2338/**
2339 * Common worker for SSE2 and MMX instructions on the forms:
2340 * pxxxx xmm1, xmm2/mem128
2341 * pxxxx mm1, mm2/mem32
2342 *
2343 * The 2nd operand is the first half of a register, which in the memory case
2344 * means a 32-bit memory access for MMX and a 128-bit aligned 64-bit or 128-bit
2345 * memory access for SSE.
2346 *
2347 * Exceptions type 4.
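 *
 * For example, punpcklbw mm1, [mem] fetches only 32 bits, while the SSE form
 * fetches a 128-bit aligned 64 bits (IEM_MC_FETCH_MEM_U64_ALIGN_U128 below).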
2348 */
2349FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2350{
2351 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2352 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2353 {
2354 case IEM_OP_PRF_SIZE_OP: /* SSE */
2355 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2356 {
2357 /*
2358 * Register, register.
2359 */
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_BEGIN(2, 0);
2362 IEM_MC_ARG(uint128_t *, pDst, 0);
2363 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2364 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2365 IEM_MC_PREPARE_SSE_USAGE();
2366 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2367 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2368 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2369 IEM_MC_ADVANCE_RIP();
2370 IEM_MC_END();
2371 }
2372 else
2373 {
2374 /*
2375 * Register, memory.
2376 */
2377 IEM_MC_BEGIN(2, 2);
2378 IEM_MC_ARG(uint128_t *, pDst, 0);
2379 IEM_MC_LOCAL(uint64_t, uSrc);
2380 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2385 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2386 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2387
2388 IEM_MC_PREPARE_SSE_USAGE();
2389 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2390 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2391
2392 IEM_MC_ADVANCE_RIP();
2393 IEM_MC_END();
2394 }
2395 return VINF_SUCCESS;
2396
2397 case 0: /* MMX */
2398 if (!pImpl->pfnU64)
2399 return IEMOP_RAISE_INVALID_OPCODE();
2400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2401 {
2402 /*
2403 * Register, register.
2404 */
2405 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2406 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2407 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2408 IEM_MC_BEGIN(2, 0);
2409 IEM_MC_ARG(uint64_t *, pDst, 0);
2410 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2411 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2412 IEM_MC_PREPARE_FPU_USAGE();
2413 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2414 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2415 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2416 IEM_MC_ADVANCE_RIP();
2417 IEM_MC_END();
2418 }
2419 else
2420 {
2421 /*
2422 * Register, memory.
2423 */
2424 IEM_MC_BEGIN(2, 2);
2425 IEM_MC_ARG(uint64_t *, pDst, 0);
2426 IEM_MC_LOCAL(uint32_t, uSrc);
2427 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2428 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2429
2430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2432 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2433 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2434
2435 IEM_MC_PREPARE_FPU_USAGE();
2436 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2437 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2438
2439 IEM_MC_ADVANCE_RIP();
2440 IEM_MC_END();
2441 }
2442 return VINF_SUCCESS;
2443
2444 default:
2445 return IEMOP_RAISE_INVALID_OPCODE();
2446 }
2447}
2448
2449
2450/** Opcode 0x0f 0x60. */
2451FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
2452{
2453 IEMOP_MNEMONIC(punpcklbw, "punpcklbw");
2454 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
2455}
2456
2457
2458/** Opcode 0x0f 0x61. */
2459FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
2460{
2461 IEMOP_MNEMONIC(punpcklwd, "punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
2462 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
2463}
2464
2465
2466/** Opcode 0x0f 0x62. */
2467FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
2468{
2469 IEMOP_MNEMONIC(punpckldq, "punpckldq");
2470 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
2471}
2472
2473
2474/** Opcode 0x0f 0x63. */
2475FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
2476/** Opcode 0x0f 0x64. */
2477FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
2478/** Opcode 0x0f 0x65. */
2479FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
2480/** Opcode 0x0f 0x66. */
2481FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
2482/** Opcode 0x0f 0x67. */
2483FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2484
2485
2486/**
2487 * Common worker for SSE2 and MMX instructions on the forms:
2488 * pxxxx xmm1, xmm2/mem128
2489 * pxxxx mm1, mm2/mem64
2490 *
2491 * The 2nd operand is the second half of a register, which in the memory case
2492 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
2493 * where it may read the full 128 bits or only the upper 64 bits.
2494 *
2495 * Exceptions type 4.
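 *
 * For example, punpckhbw mm1, [mem] fetches 64 bits, while the SSE form
 * fetches the full aligned 128 bits and uses only the high quadword.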
2496 */
2497FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2498{
2499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2500 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2501 {
2502 case IEM_OP_PRF_SIZE_OP: /* SSE */
2503 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2504 {
2505 /*
2506 * Register, register.
2507 */
2508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2509 IEM_MC_BEGIN(2, 0);
2510 IEM_MC_ARG(uint128_t *, pDst, 0);
2511 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2512 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2513 IEM_MC_PREPARE_SSE_USAGE();
2514 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2515 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2516 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2517 IEM_MC_ADVANCE_RIP();
2518 IEM_MC_END();
2519 }
2520 else
2521 {
2522 /*
2523 * Register, memory.
2524 */
2525 IEM_MC_BEGIN(2, 2);
2526 IEM_MC_ARG(uint128_t *, pDst, 0);
2527 IEM_MC_LOCAL(uint128_t, uSrc);
2528 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2529 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2530
2531 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2533 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2534 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
2535
2536 IEM_MC_PREPARE_SSE_USAGE();
2537 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2538 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2539
2540 IEM_MC_ADVANCE_RIP();
2541 IEM_MC_END();
2542 }
2543 return VINF_SUCCESS;
2544
2545 case 0: /* MMX */
2546 if (!pImpl->pfnU64)
2547 return IEMOP_RAISE_INVALID_OPCODE();
2548 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2549 {
2550 /*
2551 * Register, register.
2552 */
2553 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2554 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2556 IEM_MC_BEGIN(2, 0);
2557 IEM_MC_ARG(uint64_t *, pDst, 0);
2558 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2559 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2560 IEM_MC_PREPARE_FPU_USAGE();
2561 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2562 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2563 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2564 IEM_MC_ADVANCE_RIP();
2565 IEM_MC_END();
2566 }
2567 else
2568 {
2569 /*
2570 * Register, memory.
2571 */
2572 IEM_MC_BEGIN(2, 2);
2573 IEM_MC_ARG(uint64_t *, pDst, 0);
2574 IEM_MC_LOCAL(uint64_t, uSrc);
2575 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2577
2578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2580 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2581 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2582
2583 IEM_MC_PREPARE_FPU_USAGE();
2584 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2585 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2586
2587 IEM_MC_ADVANCE_RIP();
2588 IEM_MC_END();
2589 }
2590 return VINF_SUCCESS;
2591
2592 default:
2593 return IEMOP_RAISE_INVALID_OPCODE();
2594 }
2595}
2596
2597
2598/** Opcode 0x0f 0x68. */
2599FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
2600{
2601 IEMOP_MNEMONIC(punpckhbw, "punpckhbw");
2602 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
2603}
2604
2605
2606/** Opcode 0x0f 0x69. */
2607FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
2608{
2609 IEMOP_MNEMONIC(punpckhwd, "punpckhwd");
2610 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
2611}
2612
2613
2614/** Opcode 0x0f 0x6a. */
2615FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
2616{
2617 IEMOP_MNEMONIC(punpckhdq, "punpckhdq");
2618 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
2619}
2620
2621/** Opcode 0x0f 0x6b. */
2622FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2623
2624
2625/** Opcode 0x0f 0x6c. */
2626FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
2627{
2628 IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq");
2629 return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
2630}
2631
2632
2633/** Opcode 0x0f 0x6d. */
2634FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
2635{
2636 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
2637 return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
2638}
2639
2640
2641/** Opcode 0x0f 0x6e. */
2642FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
2643{
2644 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2645 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2646 {
2647 case IEM_OP_PRF_SIZE_OP: /* SSE */
2648 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2649 IEMOP_MNEMONIC(movq_Vq_Eq, "movq Vq,Eq");
2650 else
2651 IEMOP_MNEMONIC(movd_Vd_Ed, "movd Vd,Ed");
2652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2653 {
2654 /* XMM, greg*/
2655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2656 IEM_MC_BEGIN(0, 1);
2657 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2658 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2659 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2660 {
2661 IEM_MC_LOCAL(uint64_t, u64Tmp);
2662 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2663 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2664 }
2665 else
2666 {
2667 IEM_MC_LOCAL(uint32_t, u32Tmp);
2668 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2669 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2670 }
2671 IEM_MC_ADVANCE_RIP();
2672 IEM_MC_END();
2673 }
2674 else
2675 {
2676 /* XMM, [mem] */
2677 IEM_MC_BEGIN(0, 2);
2678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2679 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */
2680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2682 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2683 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2684 {
2685 IEM_MC_LOCAL(uint64_t, u64Tmp);
2686 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2687 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2688 }
2689 else
2690 {
2691 IEM_MC_LOCAL(uint32_t, u32Tmp);
2692 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2693 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2694 }
2695 IEM_MC_ADVANCE_RIP();
2696 IEM_MC_END();
2697 }
2698 return VINF_SUCCESS;
2699
2700 case 0: /* MMX */
2701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2702 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq");
2703 else
2704 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed");
2705 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2706 {
2707 /* MMX, greg */
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 IEM_MC_BEGIN(0, 1);
2710 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2711 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2712 IEM_MC_LOCAL(uint64_t, u64Tmp);
2713 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2714 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2715 else
2716 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2717 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2718 IEM_MC_ADVANCE_RIP();
2719 IEM_MC_END();
2720 }
2721 else
2722 {
2723 /* MMX, [mem] */
2724 IEM_MC_BEGIN(0, 2);
2725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2726 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2730 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2731 {
2732 IEM_MC_LOCAL(uint64_t, u64Tmp);
2733 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2734 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2735 }
2736 else
2737 {
2738 IEM_MC_LOCAL(uint32_t, u32Tmp);
2739 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2740 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
2741 }
2742 IEM_MC_ADVANCE_RIP();
2743 IEM_MC_END();
2744 }
2745 return VINF_SUCCESS;
2746
2747 default:
2748 return IEMOP_RAISE_INVALID_OPCODE();
2749 }
2750}
2751
2752
2753/** Opcode 0x0f 0x6f. */
2754FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
2755{
2756 bool fAligned = false;
2757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2758 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2759 {
2760 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
2761 fAligned = true;
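 /* fall thru */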
2762 case IEM_OP_PRF_REPZ: /* SSE unaligned */
2763 if (fAligned)
2764 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq");
2765 else
2766 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq");
2767 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2768 {
2769 /*
2770 * Register, register.
2771 */
2772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2773 IEM_MC_BEGIN(0, 0);
2774 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2775 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2776 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg,
2777 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2778 IEM_MC_ADVANCE_RIP();
2779 IEM_MC_END();
2780 }
2781 else
2782 {
2783 /*
2784 * Register, memory.
2785 */
2786 IEM_MC_BEGIN(0, 2);
2787 IEM_MC_LOCAL(uint128_t, u128Tmp);
2788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2789
2790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2792 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2793 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2794 if (fAligned)
2795 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2796 else
2797 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2798 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp);
2799
2800 IEM_MC_ADVANCE_RIP();
2801 IEM_MC_END();
2802 }
2803 return VINF_SUCCESS;
2804
2805 case 0: /* MMX */
2806 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq");
2807 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2808 {
2809 /*
2810 * Register, register.
2811 */
2812 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2813 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2815 IEM_MC_BEGIN(0, 1);
2816 IEM_MC_LOCAL(uint64_t, u64Tmp);
2817 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2818 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2819 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
2820 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2821 IEM_MC_ADVANCE_RIP();
2822 IEM_MC_END();
2823 }
2824 else
2825 {
2826 /*
2827 * Register, memory.
2828 */
2829 IEM_MC_BEGIN(0, 2);
2830 IEM_MC_LOCAL(uint64_t, u64Tmp);
2831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2832
2833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2835 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2836 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2837 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2838 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
2839
2840 IEM_MC_ADVANCE_RIP();
2841 IEM_MC_END();
2842 }
2843 return VINF_SUCCESS;
2844
2845 default:
2846 return IEMOP_RAISE_INVALID_OPCODE();
2847 }
2848}
2849
2850
2851/** Opcode 0x0f 0x70. The immediate here is evil! */
2852FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
2853{
2854 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2855 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2856 {
2857 case IEM_OP_PRF_SIZE_OP: /* SSE */
2858 case IEM_OP_PRF_REPNZ: /* SSE */
2859 case IEM_OP_PRF_REPZ: /* SSE */
2860 {
2861 PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
2862 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2863 {
2864 case IEM_OP_PRF_SIZE_OP:
2865 IEMOP_MNEMONIC(pshufd_Vdq_Wdq, "pshufd Vdq,Wdq,Ib");
2866 pfnAImpl = iemAImpl_pshufd;
2867 break;
2868 case IEM_OP_PRF_REPNZ:
2869 IEMOP_MNEMONIC(pshuflw_Vdq_Wdq, "pshuflw Vdq,Wdq,Ib");
2870 pfnAImpl = iemAImpl_pshuflw;
2871 break;
2872 case IEM_OP_PRF_REPZ:
2873 IEMOP_MNEMONIC(pshufhw_Vdq_Wdq, "pshufhw Vdq,Wdq,Ib");
2874 pfnAImpl = iemAImpl_pshufhw;
2875 break;
2876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2877 }
2878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2879 {
2880 /*
2881 * Register, register.
2882 */
2883 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2885
2886 IEM_MC_BEGIN(3, 0);
2887 IEM_MC_ARG(uint128_t *, pDst, 0);
2888 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2889 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2890 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2891 IEM_MC_PREPARE_SSE_USAGE();
2892 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2893 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2894 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2895 IEM_MC_ADVANCE_RIP();
2896 IEM_MC_END();
2897 }
2898 else
2899 {
2900 /*
2901 * Register, memory.
2902 */
2903 IEM_MC_BEGIN(3, 2);
2904 IEM_MC_ARG(uint128_t *, pDst, 0);
2905 IEM_MC_LOCAL(uint128_t, uSrc);
2906 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2908
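 /* The 'evil' immediate trails the ModR/M displacement, so it is passed as
    cbImm to the effective address calculation (matters for RIP-relative
    addressing) and can only be fetched afterwards. */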
2909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2910 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2911 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2914
2915 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2916 IEM_MC_PREPARE_SSE_USAGE();
2917 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
2918 IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
2919
2920 IEM_MC_ADVANCE_RIP();
2921 IEM_MC_END();
2922 }
2923 return VINF_SUCCESS;
2924 }
2925
2926 case 0: /* MMX Extension */
2927 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib");
2928 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2929 {
2930 /*
2931 * Register, register.
2932 */
2933 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2935
2936 IEM_MC_BEGIN(3, 0);
2937 IEM_MC_ARG(uint64_t *, pDst, 0);
2938 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2939 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2940 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2941 IEM_MC_PREPARE_FPU_USAGE();
2942 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2943 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2944 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2945 IEM_MC_ADVANCE_RIP();
2946 IEM_MC_END();
2947 }
2948 else
2949 {
2950 /*
2951 * Register, memory.
2952 */
2953 IEM_MC_BEGIN(3, 2);
2954 IEM_MC_ARG(uint64_t *, pDst, 0);
2955 IEM_MC_LOCAL(uint64_t, uSrc);
2956 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2957 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2958
2959 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2960 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
2961 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
2962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2963 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
2964
2965 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2966 IEM_MC_PREPARE_FPU_USAGE();
2967 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2968 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
2969
2970 IEM_MC_ADVANCE_RIP();
2971 IEM_MC_END();
2972 }
2973 return VINF_SUCCESS;
2974
2975 default:
2976 return IEMOP_RAISE_INVALID_OPCODE();
2977 }
2978}
2979
2980
2981/** Opcode 0x0f 0x71 11/2. */
2982FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
2983
2984/** Opcode 0x66 0x0f 0x71 11/2. */
2985FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
2986
2987/** Opcode 0x0f 0x71 11/4. */
2988FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
2989
2990/** Opcode 0x66 0x0f 0x71 11/4. */
2991FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
2992
2993/** Opcode 0x0f 0x71 11/6. */
2994FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
2995
2996/** Opcode 0x66 0x0f 0x71 11/6. */
2997FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2998
2999
3000/** Opcode 0x0f 0x71. */
3001FNIEMOP_DEF(iemOp_Grp12)
3002{
3003 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3004 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3005 return IEMOP_RAISE_INVALID_OPCODE();
3006 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3007 {
3008 case 0: case 1: case 3: case 5: case 7:
3009 return IEMOP_RAISE_INVALID_OPCODE();
3010 case 2:
3011 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3012 {
3013 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
3014 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
3015 default: return IEMOP_RAISE_INVALID_OPCODE();
3016 }
3017 case 4:
3018 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3019 {
3020 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
3021 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
3022 default: return IEMOP_RAISE_INVALID_OPCODE();
3023 }
3024 case 6:
3025 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3026 {
3027 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
3028 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
3029 default: return IEMOP_RAISE_INVALID_OPCODE();
3030 }
3031 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3032 }
3033}
3034
3035
3036/** Opcode 0x0f 0x72 11/2. */
3037FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
3038
3039/** Opcode 0x66 0x0f 0x72 11/2. */
3040FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
3041
3042/** Opcode 0x0f 0x72 11/4. */
3043FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
3044
3045/** Opcode 0x66 0x0f 0x72 11/4. */
3046FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
3047
3048/** Opcode 0x0f 0x72 11/6. */
3049FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
3050
3051/** Opcode 0x66 0x0f 0x72 11/6. */
3052FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
3053
3054
3055/** Opcode 0x0f 0x72. */
3056FNIEMOP_DEF(iemOp_Grp13)
3057{
3058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3059 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3060 return IEMOP_RAISE_INVALID_OPCODE();
3061 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3062 {
3063 case 0: case 1: case 3: case 5: case 7:
3064 return IEMOP_RAISE_INVALID_OPCODE();
3065 case 2:
3066 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3067 {
3068 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
3069 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
3070 default: return IEMOP_RAISE_INVALID_OPCODE();
3071 }
3072 case 4:
3073 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3074 {
3075 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
3076 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
3077 default: return IEMOP_RAISE_INVALID_OPCODE();
3078 }
3079 case 6:
3080 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3081 {
3082 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
3083 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
3084 default: return IEMOP_RAISE_INVALID_OPCODE();
3085 }
3086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3087 }
3088}
3089
3090
3091/** Opcode 0x0f 0x73 11/2. */
3092FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
3093
3094/** Opcode 0x66 0x0f 0x73 11/2. */
3095FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
3096
3097/** Opcode 0x66 0x0f 0x73 11/3. */
3098FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT
3099
3100/** Opcode 0x0f 0x73 11/6. */
3101FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
3102
3103/** Opcode 0x66 0x0f 0x73 11/6. */
3104FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
3105
3106/** Opcode 0x66 0x0f 0x73 11/7. */
3107FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
3108
3109
3110/** Opcode 0x0f 0x73. */
3111FNIEMOP_DEF(iemOp_Grp14)
3112{
3113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3114 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
3115 return IEMOP_RAISE_INVALID_OPCODE();
3116 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3117 {
3118 case 0: case 1: case 4: case 5:
3119 return IEMOP_RAISE_INVALID_OPCODE();
3120 case 2:
3121 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3122 {
3123 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
3124 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
3125 default: return IEMOP_RAISE_INVALID_OPCODE();
3126 }
3127 case 3:
3128 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3129 {
3130 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
3131 default: return IEMOP_RAISE_INVALID_OPCODE();
3132 }
3133 case 6:
3134 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3135 {
3136 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
3137 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
3138 default: return IEMOP_RAISE_INVALID_OPCODE();
3139 }
3140 case 7:
3141 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3142 {
3143 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
3144 default: return IEMOP_RAISE_INVALID_OPCODE();
3145 }
3146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3147 }
3148}
3149
3150
3151/**
3152 * Common worker for SSE2 and MMX instructions on the forms:
3153 * pxxx mm1, mm2/mem64
3154 * pxxx xmm1, xmm2/mem128
3155 *
3156 * Proper alignment of the 128-bit operand is enforced.
3157 * Exceptions type 4. SSE2 and MMX cpuid checks.
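 *
 * For example, pcmpeqb below handles both pcmpeqb mm1, mm2/m64 (MMX) and
 * pcmpeqb xmm1, xmm2/m128 (SSE2), the latter with alignment enforced.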
3158 */
3159FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
3160{
3161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3162 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3163 {
3164 case IEM_OP_PRF_SIZE_OP: /* SSE */
3165 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3166 {
3167 /*
3168 * Register, register.
3169 */
3170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3171 IEM_MC_BEGIN(2, 0);
3172 IEM_MC_ARG(uint128_t *, pDst, 0);
3173 IEM_MC_ARG(uint128_t const *, pSrc, 1);
3174 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3175 IEM_MC_PREPARE_SSE_USAGE();
3176 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3177 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3178 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3179 IEM_MC_ADVANCE_RIP();
3180 IEM_MC_END();
3181 }
3182 else
3183 {
3184 /*
3185 * Register, memory.
3186 */
3187 IEM_MC_BEGIN(2, 2);
3188 IEM_MC_ARG(uint128_t *, pDst, 0);
3189 IEM_MC_LOCAL(uint128_t, uSrc);
3190 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
3191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3192
3193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3195 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3196 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3197
3198 IEM_MC_PREPARE_SSE_USAGE();
3199 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3200 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3201
3202 IEM_MC_ADVANCE_RIP();
3203 IEM_MC_END();
3204 }
3205 return VINF_SUCCESS;
3206
3207 case 0: /* MMX */
3208 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3209 {
3210 /*
3211 * Register, register.
3212 */
3213 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3214 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3216 IEM_MC_BEGIN(2, 0);
3217 IEM_MC_ARG(uint64_t *, pDst, 0);
3218 IEM_MC_ARG(uint64_t const *, pSrc, 1);
3219 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3220 IEM_MC_PREPARE_FPU_USAGE();
3221 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3222 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
3223 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3224 IEM_MC_ADVANCE_RIP();
3225 IEM_MC_END();
3226 }
3227 else
3228 {
3229 /*
3230 * Register, memory.
3231 */
3232 IEM_MC_BEGIN(2, 2);
3233 IEM_MC_ARG(uint64_t *, pDst, 0);
3234 IEM_MC_LOCAL(uint64_t, uSrc);
3235 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
3236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3237
3238 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3240 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3241 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3242
3243 IEM_MC_PREPARE_FPU_USAGE();
3244 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3245 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
3246
3247 IEM_MC_ADVANCE_RIP();
3248 IEM_MC_END();
3249 }
3250 return VINF_SUCCESS;
3251
3252 default:
3253 return IEMOP_RAISE_INVALID_OPCODE();
3254 }
3255}
3256
3257
3258/** Opcode 0x0f 0x74. */
3259FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
3260{
3261 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb");
3262 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
3263}
3264
3265
3266/** Opcode 0x0f 0x75. */
3267FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
3268{
3269 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw");
3270 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
3271}
3272
3273
3274/** Opcode 0x0f 0x76. */
3275FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
3276{
3277 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd");
3278 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
3279}
3280
3281
3282/** Opcode 0x0f 0x77. */
3283FNIEMOP_STUB(iemOp_emms);
3284/** Opcode 0x0f 0x78. */
3285FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
3286/** Opcode 0x0f 0x79. */
3287FNIEMOP_UD_STUB(iemOp_vmwrite);
3288/** Opcode 0x0f 0x7c. */
3289FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
3290/** Opcode 0x0f 0x7d. */
3291FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
3292
3293
3294/** Opcode 0x0f 0x7e. */
3295FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
3296{
3297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3298 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3299 {
3300 case IEM_OP_PRF_SIZE_OP: /* SSE */
3301 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3302 IEMOP_MNEMONIC(movq_Eq_Vq, "movq Eq,Vq");
3303 else
3304 IEMOP_MNEMONIC(movd_Ed_Vd, "movd Ed,Vd");
3305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3306 {
3307 /* greg, XMM */
3308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3309 IEM_MC_BEGIN(0, 1);
3310 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3311 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3312 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3313 {
3314 IEM_MC_LOCAL(uint64_t, u64Tmp);
3315 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3316 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3317 }
3318 else
3319 {
3320 IEM_MC_LOCAL(uint32_t, u32Tmp);
3321 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3322 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3323 }
3324 IEM_MC_ADVANCE_RIP();
3325 IEM_MC_END();
3326 }
3327 else
3328 {
3329 /* [mem], XMM */
3330 IEM_MC_BEGIN(0, 2);
3331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3332 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3335 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3337 {
3338 IEM_MC_LOCAL(uint64_t, u64Tmp);
3339 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3340 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3341 }
3342 else
3343 {
3344 IEM_MC_LOCAL(uint32_t, u32Tmp);
3345 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3346 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3347 }
3348 IEM_MC_ADVANCE_RIP();
3349 IEM_MC_END();
3350 }
3351 return VINF_SUCCESS;
3352
3353 case 0: /* MMX */
3354 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3355 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq");
3356 else
3357 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd");
3358 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3359 {
3360 /* greg, MMX */
3361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3362 IEM_MC_BEGIN(0, 1);
3363 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3365 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3366 {
3367 IEM_MC_LOCAL(uint64_t, u64Tmp);
3368 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3369 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp);
3370 }
3371 else
3372 {
3373 IEM_MC_LOCAL(uint32_t, u32Tmp);
3374 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3375 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp);
3376 }
3377 IEM_MC_ADVANCE_RIP();
3378 IEM_MC_END();
3379 }
3380 else
3381 {
3382 /* [mem], MMX */
3383 IEM_MC_BEGIN(0, 2);
3384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3385 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3388 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3389 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3390 {
3391 IEM_MC_LOCAL(uint64_t, u64Tmp);
3392 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3393 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3394 }
3395 else
3396 {
3397 IEM_MC_LOCAL(uint32_t, u32Tmp);
3398 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3399 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3400 }
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 return VINF_SUCCESS;
3405
3406 default:
3407 return IEMOP_RAISE_INVALID_OPCODE();
3408 }
3409}
3410
3411
3412/** Opcode 0x0f 0x7f. */
3413FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
3414{
3415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3416 bool fAligned = false;
3417 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3418 {
3419 case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
3420 fAligned = true;
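 /* fall thru */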
3421 case IEM_OP_PRF_REPZ: /* SSE unaligned */
3422 if (fAligned)
3423 IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wdq,Vdq");
3424 else
3425 IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wdq,Vdq");
3426 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3427 {
3428 /*
3429 * Register, register.
3430 */
3431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3432 IEM_MC_BEGIN(0, 0);
3433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3435 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
3436 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3437 IEM_MC_ADVANCE_RIP();
3438 IEM_MC_END();
3439 }
3440 else
3441 {
3442 /*
3443 * Register, memory.
3444 */
3445 IEM_MC_BEGIN(0, 2);
3446 IEM_MC_LOCAL(uint128_t, u128Tmp);
3447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3448
3449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3451 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3453
3454 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3455 if (fAligned)
3456 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3457 else
3458 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
3459
3460 IEM_MC_ADVANCE_RIP();
3461 IEM_MC_END();
3462 }
3463 return VINF_SUCCESS;
3464
3465 case 0: /* MMX */
3466 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq");
3467
3468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3469 {
3470 /*
3471 * Register, register.
3472 */
3473 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
3474 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
3475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3476 IEM_MC_BEGIN(0, 1);
3477 IEM_MC_LOCAL(uint64_t, u64Tmp);
3478 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3479 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
3480 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3481 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
3482 IEM_MC_ADVANCE_RIP();
3483 IEM_MC_END();
3484 }
3485 else
3486 {
3487 /*
3488 * Register, memory.
3489 */
3490 IEM_MC_BEGIN(0, 2);
3491 IEM_MC_LOCAL(uint64_t, u64Tmp);
3492 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3493
3494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3496 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
3497 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
3498
3499 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3500 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3501
3502 IEM_MC_ADVANCE_RIP();
3503 IEM_MC_END();
3504 }
3505 return VINF_SUCCESS;
3506
3507 default:
3508 return IEMOP_RAISE_INVALID_OPCODE();
3509 }
3510}
3511
3512
3513
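/*
 * EFLAGS tests used by the Jcc (0x0f 0x80..0x8f) and SETcc (0x0f 0x90..0x9f)
 * groups below:
 *      o/no    OF set / clear                  s/ns    SF set / clear
 *      c/nc    CF set / clear                  p/np    PF set / clear
 *      e/ne    ZF set / clear                  l/nl    SF != OF / SF == OF
 *      be/nbe  CF or ZF set / both clear       le/nle  ZF set or SF != OF / neither
 */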
3514/** Opcode 0x0f 0x80. */
3515FNIEMOP_DEF(iemOp_jo_Jv)
3516{
3517 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
3518 IEMOP_HLP_MIN_386();
3519 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3520 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3521 {
3522 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3524
3525 IEM_MC_BEGIN(0, 0);
3526 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3527 IEM_MC_REL_JMP_S16(i16Imm);
3528 } IEM_MC_ELSE() {
3529 IEM_MC_ADVANCE_RIP();
3530 } IEM_MC_ENDIF();
3531 IEM_MC_END();
3532 }
3533 else
3534 {
3535 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537
3538 IEM_MC_BEGIN(0, 0);
3539 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3540 IEM_MC_REL_JMP_S32(i32Imm);
3541 } IEM_MC_ELSE() {
3542 IEM_MC_ADVANCE_RIP();
3543 } IEM_MC_ENDIF();
3544 IEM_MC_END();
3545 }
3546 return VINF_SUCCESS;
3547}
3548
3549
3550/** Opcode 0x0f 0x81. */
3551FNIEMOP_DEF(iemOp_jno_Jv)
3552{
3553 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
3554 IEMOP_HLP_MIN_386();
3555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3556 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3557 {
3558 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3560
3561 IEM_MC_BEGIN(0, 0);
3562 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3563 IEM_MC_ADVANCE_RIP();
3564 } IEM_MC_ELSE() {
3565 IEM_MC_REL_JMP_S16(i16Imm);
3566 } IEM_MC_ENDIF();
3567 IEM_MC_END();
3568 }
3569 else
3570 {
3571 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3573
3574 IEM_MC_BEGIN(0, 0);
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3576 IEM_MC_ADVANCE_RIP();
3577 } IEM_MC_ELSE() {
3578 IEM_MC_REL_JMP_S32(i32Imm);
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581 }
3582 return VINF_SUCCESS;
3583}
3584
3585
3586/** Opcode 0x0f 0x82. */
3587FNIEMOP_DEF(iemOp_jc_Jv)
3588{
3589 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
3590 IEMOP_HLP_MIN_386();
3591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3592 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3593 {
3594 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596
3597 IEM_MC_BEGIN(0, 0);
3598 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3599 IEM_MC_REL_JMP_S16(i16Imm);
3600 } IEM_MC_ELSE() {
3601 IEM_MC_ADVANCE_RIP();
3602 } IEM_MC_ENDIF();
3603 IEM_MC_END();
3604 }
3605 else
3606 {
3607 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3609
3610 IEM_MC_BEGIN(0, 0);
3611 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3612 IEM_MC_REL_JMP_S32(i32Imm);
3613 } IEM_MC_ELSE() {
3614 IEM_MC_ADVANCE_RIP();
3615 } IEM_MC_ENDIF();
3616 IEM_MC_END();
3617 }
3618 return VINF_SUCCESS;
3619}
3620
3621
3622/** Opcode 0x0f 0x83. */
3623FNIEMOP_DEF(iemOp_jnc_Jv)
3624{
3625 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
3626 IEMOP_HLP_MIN_386();
3627 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3628 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3629 {
3630 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0);
3634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3635 IEM_MC_ADVANCE_RIP();
3636 } IEM_MC_ELSE() {
3637 IEM_MC_REL_JMP_S16(i16Imm);
3638 } IEM_MC_ENDIF();
3639 IEM_MC_END();
3640 }
3641 else
3642 {
3643 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3645
3646 IEM_MC_BEGIN(0, 0);
3647 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3648 IEM_MC_ADVANCE_RIP();
3649 } IEM_MC_ELSE() {
3650 IEM_MC_REL_JMP_S32(i32Imm);
3651 } IEM_MC_ENDIF();
3652 IEM_MC_END();
3653 }
3654 return VINF_SUCCESS;
3655}
3656
3657
3658/** Opcode 0x0f 0x84. */
3659FNIEMOP_DEF(iemOp_je_Jv)
3660{
3661 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
3662 IEMOP_HLP_MIN_386();
3663 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3664 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3665 {
3666 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3668
3669 IEM_MC_BEGIN(0, 0);
3670 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3671 IEM_MC_REL_JMP_S16(i16Imm);
3672 } IEM_MC_ELSE() {
3673 IEM_MC_ADVANCE_RIP();
3674 } IEM_MC_ENDIF();
3675 IEM_MC_END();
3676 }
3677 else
3678 {
3679 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3681
3682 IEM_MC_BEGIN(0, 0);
3683 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3684 IEM_MC_REL_JMP_S32(i32Imm);
3685 } IEM_MC_ELSE() {
3686 IEM_MC_ADVANCE_RIP();
3687 } IEM_MC_ENDIF();
3688 IEM_MC_END();
3689 }
3690 return VINF_SUCCESS;
3691}
3692
3693
3694/** Opcode 0x0f 0x85. */
3695FNIEMOP_DEF(iemOp_jne_Jv)
3696{
3697 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
3698 IEMOP_HLP_MIN_386();
3699 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3700 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3701 {
3702 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3704
3705 IEM_MC_BEGIN(0, 0);
3706 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3707 IEM_MC_ADVANCE_RIP();
3708 } IEM_MC_ELSE() {
3709 IEM_MC_REL_JMP_S16(i16Imm);
3710 } IEM_MC_ENDIF();
3711 IEM_MC_END();
3712 }
3713 else
3714 {
3715 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0);
3719 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3720 IEM_MC_ADVANCE_RIP();
3721 } IEM_MC_ELSE() {
3722 IEM_MC_REL_JMP_S32(i32Imm);
3723 } IEM_MC_ENDIF();
3724 IEM_MC_END();
3725 }
3726 return VINF_SUCCESS;
3727}
3728
3729
3730/** Opcode 0x0f 0x86. */
3731FNIEMOP_DEF(iemOp_jbe_Jv)
3732{
3733 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
3734 IEMOP_HLP_MIN_386();
3735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3736 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3737 {
3738 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740
3741 IEM_MC_BEGIN(0, 0);
3742 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3743 IEM_MC_REL_JMP_S16(i16Imm);
3744 } IEM_MC_ELSE() {
3745 IEM_MC_ADVANCE_RIP();
3746 } IEM_MC_ENDIF();
3747 IEM_MC_END();
3748 }
3749 else
3750 {
3751 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3753
3754 IEM_MC_BEGIN(0, 0);
3755 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3756 IEM_MC_REL_JMP_S32(i32Imm);
3757 } IEM_MC_ELSE() {
3758 IEM_MC_ADVANCE_RIP();
3759 } IEM_MC_ENDIF();
3760 IEM_MC_END();
3761 }
3762 return VINF_SUCCESS;
3763}
3764
3765
3766/** Opcode 0x0f 0x87. */
3767FNIEMOP_DEF(iemOp_jnbe_Jv)
3768{
3769 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
3770 IEMOP_HLP_MIN_386();
3771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3772 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3773 {
3774 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776
3777 IEM_MC_BEGIN(0, 0);
3778 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3779 IEM_MC_ADVANCE_RIP();
3780 } IEM_MC_ELSE() {
3781 IEM_MC_REL_JMP_S16(i16Imm);
3782 } IEM_MC_ENDIF();
3783 IEM_MC_END();
3784 }
3785 else
3786 {
3787 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789
3790 IEM_MC_BEGIN(0, 0);
3791 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3792 IEM_MC_ADVANCE_RIP();
3793 } IEM_MC_ELSE() {
3794 IEM_MC_REL_JMP_S32(i32Imm);
3795 } IEM_MC_ENDIF();
3796 IEM_MC_END();
3797 }
3798 return VINF_SUCCESS;
3799}
3800
3801
3802/** Opcode 0x0f 0x88. */
3803FNIEMOP_DEF(iemOp_js_Jv)
3804{
3805 IEMOP_MNEMONIC(js_Jv, "js Jv");
3806 IEMOP_HLP_MIN_386();
3807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3809 {
3810 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3812
3813 IEM_MC_BEGIN(0, 0);
3814 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3815 IEM_MC_REL_JMP_S16(i16Imm);
3816 } IEM_MC_ELSE() {
3817 IEM_MC_ADVANCE_RIP();
3818 } IEM_MC_ENDIF();
3819 IEM_MC_END();
3820 }
3821 else
3822 {
3823 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3825
3826 IEM_MC_BEGIN(0, 0);
3827 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3828 IEM_MC_REL_JMP_S32(i32Imm);
3829 } IEM_MC_ELSE() {
3830 IEM_MC_ADVANCE_RIP();
3831 } IEM_MC_ENDIF();
3832 IEM_MC_END();
3833 }
3834 return VINF_SUCCESS;
3835}
3836
3837
3838/** Opcode 0x0f 0x89. */
3839FNIEMOP_DEF(iemOp_jns_Jv)
3840{
3841 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
3842 IEMOP_HLP_MIN_386();
3843 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3844 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3845 {
3846 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3848
3849 IEM_MC_BEGIN(0, 0);
3850 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3851 IEM_MC_ADVANCE_RIP();
3852 } IEM_MC_ELSE() {
3853 IEM_MC_REL_JMP_S16(i16Imm);
3854 } IEM_MC_ENDIF();
3855 IEM_MC_END();
3856 }
3857 else
3858 {
3859 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3861
3862 IEM_MC_BEGIN(0, 0);
3863 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3864 IEM_MC_ADVANCE_RIP();
3865 } IEM_MC_ELSE() {
3866 IEM_MC_REL_JMP_S32(i32Imm);
3867 } IEM_MC_ENDIF();
3868 IEM_MC_END();
3869 }
3870 return VINF_SUCCESS;
3871}
3872
3873
3874/** Opcode 0x0f 0x8a. */
3875FNIEMOP_DEF(iemOp_jp_Jv)
3876{
3877 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
3878 IEMOP_HLP_MIN_386();
3879 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3880 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3881 {
3882 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3884
3885 IEM_MC_BEGIN(0, 0);
3886 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3887 IEM_MC_REL_JMP_S16(i16Imm);
3888 } IEM_MC_ELSE() {
3889 IEM_MC_ADVANCE_RIP();
3890 } IEM_MC_ENDIF();
3891 IEM_MC_END();
3892 }
3893 else
3894 {
3895 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3897
3898 IEM_MC_BEGIN(0, 0);
3899 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3900 IEM_MC_REL_JMP_S32(i32Imm);
3901 } IEM_MC_ELSE() {
3902 IEM_MC_ADVANCE_RIP();
3903 } IEM_MC_ENDIF();
3904 IEM_MC_END();
3905 }
3906 return VINF_SUCCESS;
3907}
3908
3909
3910/** Opcode 0x0f 0x8b. */
3911FNIEMOP_DEF(iemOp_jnp_Jv)
3912{
3913 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
3914 IEMOP_HLP_MIN_386();
3915 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3916 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3917 {
3918 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3920
3921 IEM_MC_BEGIN(0, 0);
3922 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3923 IEM_MC_ADVANCE_RIP();
3924 } IEM_MC_ELSE() {
3925 IEM_MC_REL_JMP_S16(i16Imm);
3926 } IEM_MC_ENDIF();
3927 IEM_MC_END();
3928 }
3929 else
3930 {
3931 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3933
3934 IEM_MC_BEGIN(0, 0);
3935 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3936 IEM_MC_ADVANCE_RIP();
3937 } IEM_MC_ELSE() {
3938 IEM_MC_REL_JMP_S32(i32Imm);
3939 } IEM_MC_ENDIF();
3940 IEM_MC_END();
3941 }
3942 return VINF_SUCCESS;
3943}
3944
3945
3946/** Opcode 0x0f 0x8c. */
3947FNIEMOP_DEF(iemOp_jl_Jv)
3948{
3949 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
3950 IEMOP_HLP_MIN_386();
3951 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3952 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3953 {
3954 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3956
3957 IEM_MC_BEGIN(0, 0);
3958 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3959 IEM_MC_REL_JMP_S16(i16Imm);
3960 } IEM_MC_ELSE() {
3961 IEM_MC_ADVANCE_RIP();
3962 } IEM_MC_ENDIF();
3963 IEM_MC_END();
3964 }
3965 else
3966 {
3967 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3969
3970 IEM_MC_BEGIN(0, 0);
3971 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3972 IEM_MC_REL_JMP_S32(i32Imm);
3973 } IEM_MC_ELSE() {
3974 IEM_MC_ADVANCE_RIP();
3975 } IEM_MC_ENDIF();
3976 IEM_MC_END();
3977 }
3978 return VINF_SUCCESS;
3979}
3980
3981
3982/** Opcode 0x0f 0x8d. */
3983FNIEMOP_DEF(iemOp_jnl_Jv)
3984{
3985 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
3986 IEMOP_HLP_MIN_386();
3987 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3988 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
3989 {
3990 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3992
3993 IEM_MC_BEGIN(0, 0);
3994 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3995 IEM_MC_ADVANCE_RIP();
3996 } IEM_MC_ELSE() {
3997 IEM_MC_REL_JMP_S16(i16Imm);
3998 } IEM_MC_ENDIF();
3999 IEM_MC_END();
4000 }
4001 else
4002 {
4003 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4005
4006 IEM_MC_BEGIN(0, 0);
4007 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4008 IEM_MC_ADVANCE_RIP();
4009 } IEM_MC_ELSE() {
4010 IEM_MC_REL_JMP_S32(i32Imm);
4011 } IEM_MC_ENDIF();
4012 IEM_MC_END();
4013 }
4014 return VINF_SUCCESS;
4015}
4016
4017
4018/** Opcode 0x0f 0x8e. */
4019FNIEMOP_DEF(iemOp_jle_Jv)
4020{
4021 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
4022 IEMOP_HLP_MIN_386();
4023 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4024 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4025 {
4026 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4028
4029 IEM_MC_BEGIN(0, 0);
4030 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4031 IEM_MC_REL_JMP_S16(i16Imm);
4032 } IEM_MC_ELSE() {
4033 IEM_MC_ADVANCE_RIP();
4034 } IEM_MC_ENDIF();
4035 IEM_MC_END();
4036 }
4037 else
4038 {
4039 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4041
4042 IEM_MC_BEGIN(0, 0);
4043 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4044 IEM_MC_REL_JMP_S32(i32Imm);
4045 } IEM_MC_ELSE() {
4046 IEM_MC_ADVANCE_RIP();
4047 } IEM_MC_ENDIF();
4048 IEM_MC_END();
4049 }
4050 return VINF_SUCCESS;
4051}
4052
4053
4054/** Opcode 0x0f 0x8f. */
4055FNIEMOP_DEF(iemOp_jnle_Jv)
4056{
4057 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
4058 IEMOP_HLP_MIN_386();
4059 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4060 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
4061 {
4062 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4064
4065 IEM_MC_BEGIN(0, 0);
4066 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4067 IEM_MC_ADVANCE_RIP();
4068 } IEM_MC_ELSE() {
4069 IEM_MC_REL_JMP_S16(i16Imm);
4070 } IEM_MC_ENDIF();
4071 IEM_MC_END();
4072 }
4073 else
4074 {
4075 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
4076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4077
4078 IEM_MC_BEGIN(0, 0);
4079 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4080 IEM_MC_ADVANCE_RIP();
4081 } IEM_MC_ELSE() {
4082 IEM_MC_REL_JMP_S32(i32Imm);
4083 } IEM_MC_ENDIF();
4084 IEM_MC_END();
4085 }
4086 return VINF_SUCCESS;
4087}
4088
4089
4090/** Opcode 0x0f 0x90. */
4091FNIEMOP_DEF(iemOp_seto_Eb)
4092{
4093 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
4094 IEMOP_HLP_MIN_386();
4095 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4096
4097 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4098 * any way. AMD says it's "unused", whatever that means. We're
4099 * ignoring for now. */
4100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4101 {
4102 /* register target */
4103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4104 IEM_MC_BEGIN(0, 0);
4105 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4106 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4107 } IEM_MC_ELSE() {
4108 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4109 } IEM_MC_ENDIF();
4110 IEM_MC_ADVANCE_RIP();
4111 IEM_MC_END();
4112 }
4113 else
4114 {
4115 /* memory target */
4116 IEM_MC_BEGIN(0, 1);
4117 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4121 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4122 } IEM_MC_ELSE() {
4123 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4124 } IEM_MC_ENDIF();
4125 IEM_MC_ADVANCE_RIP();
4126 IEM_MC_END();
4127 }
4128 return VINF_SUCCESS;
4129}
4130
4131
4132/** Opcode 0x0f 0x91. */
4133FNIEMOP_DEF(iemOp_setno_Eb)
4134{
4135 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
4136 IEMOP_HLP_MIN_386();
4137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4138
4139 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4140 * any way. AMD says it's "unused", whatever that means. We're
4141 * ignoring for now. */
4142 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4143 {
4144 /* register target */
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4146 IEM_MC_BEGIN(0, 0);
4147 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4148 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4149 } IEM_MC_ELSE() {
4150 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4151 } IEM_MC_ENDIF();
4152 IEM_MC_ADVANCE_RIP();
4153 IEM_MC_END();
4154 }
4155 else
4156 {
4157 /* memory target */
4158 IEM_MC_BEGIN(0, 1);
4159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4162 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
4163 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4164 } IEM_MC_ELSE() {
4165 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4166 } IEM_MC_ENDIF();
4167 IEM_MC_ADVANCE_RIP();
4168 IEM_MC_END();
4169 }
4170 return VINF_SUCCESS;
4171}
4172
4173
4174/** Opcode 0x0f 0x92. */
4175FNIEMOP_DEF(iemOp_setc_Eb)
4176{
4177 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
4178 IEMOP_HLP_MIN_386();
4179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4180
4181 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4182 * any way. AMD says it's "unused", whatever that means. We're
4183 * ignoring for now. */
4184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4185 {
4186 /* register target */
4187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4188 IEM_MC_BEGIN(0, 0);
4189 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4190 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4191 } IEM_MC_ELSE() {
4192 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4193 } IEM_MC_ENDIF();
4194 IEM_MC_ADVANCE_RIP();
4195 IEM_MC_END();
4196 }
4197 else
4198 {
4199 /* memory target */
4200 IEM_MC_BEGIN(0, 1);
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4204 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4205 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4206 } IEM_MC_ELSE() {
4207 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4208 } IEM_MC_ENDIF();
4209 IEM_MC_ADVANCE_RIP();
4210 IEM_MC_END();
4211 }
4212 return VINF_SUCCESS;
4213}
4214
4215
4216/** Opcode 0x0f 0x93. */
4217FNIEMOP_DEF(iemOp_setnc_Eb)
4218{
4219 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
4220 IEMOP_HLP_MIN_386();
4221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4222
4223 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4224 * any way. AMD says it's "unused", whatever that means. We're
4225 * ignoring for now. */
4226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4227 {
4228 /* register target */
4229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4230 IEM_MC_BEGIN(0, 0);
4231 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4232 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4233 } IEM_MC_ELSE() {
4234 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4235 } IEM_MC_ENDIF();
4236 IEM_MC_ADVANCE_RIP();
4237 IEM_MC_END();
4238 }
4239 else
4240 {
4241 /* memory target */
4242 IEM_MC_BEGIN(0, 1);
4243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
4247 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4248 } IEM_MC_ELSE() {
4249 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4250 } IEM_MC_ENDIF();
4251 IEM_MC_ADVANCE_RIP();
4252 IEM_MC_END();
4253 }
4254 return VINF_SUCCESS;
4255}
4256
4257
4258/** Opcode 0x0f 0x94. */
4259FNIEMOP_DEF(iemOp_sete_Eb)
4260{
4261 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
4262 IEMOP_HLP_MIN_386();
4263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4264
4265 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4266 * any way. AMD says it's "unused", whatever that means. We're
4267 * ignoring for now. */
4268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4269 {
4270 /* register target */
4271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4272 IEM_MC_BEGIN(0, 0);
4273 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4274 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4275 } IEM_MC_ELSE() {
4276 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4277 } IEM_MC_ENDIF();
4278 IEM_MC_ADVANCE_RIP();
4279 IEM_MC_END();
4280 }
4281 else
4282 {
4283 /* memory target */
4284 IEM_MC_BEGIN(0, 1);
4285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4288 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4289 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4290 } IEM_MC_ELSE() {
4291 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4292 } IEM_MC_ENDIF();
4293 IEM_MC_ADVANCE_RIP();
4294 IEM_MC_END();
4295 }
4296 return VINF_SUCCESS;
4297}
4298
4299
4300/** Opcode 0x0f 0x95. */
4301FNIEMOP_DEF(iemOp_setne_Eb)
4302{
4303 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
4304 IEMOP_HLP_MIN_386();
4305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4306
4307 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4308 * any way. AMD says it's "unused", whatever that means. We're
4309 * ignoring for now. */
4310 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4311 {
4312 /* register target */
4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4314 IEM_MC_BEGIN(0, 0);
4315 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4316 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4317 } IEM_MC_ELSE() {
4318 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4319 } IEM_MC_ENDIF();
4320 IEM_MC_ADVANCE_RIP();
4321 IEM_MC_END();
4322 }
4323 else
4324 {
4325 /* memory target */
4326 IEM_MC_BEGIN(0, 1);
4327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
4331 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4332 } IEM_MC_ELSE() {
4333 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4334 } IEM_MC_ENDIF();
4335 IEM_MC_ADVANCE_RIP();
4336 IEM_MC_END();
4337 }
4338 return VINF_SUCCESS;
4339}
4340
4341
4342/** Opcode 0x0f 0x96. */
4343FNIEMOP_DEF(iemOp_setbe_Eb)
4344{
4345 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
4346 IEMOP_HLP_MIN_386();
4347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4348
4349 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4350 * any way. AMD says it's "unused", whatever that means. We're
4351 * ignoring for now. */
4352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4353 {
4354 /* register target */
4355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4356 IEM_MC_BEGIN(0, 0);
4357 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4358 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4359 } IEM_MC_ELSE() {
4360 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4361 } IEM_MC_ENDIF();
4362 IEM_MC_ADVANCE_RIP();
4363 IEM_MC_END();
4364 }
4365 else
4366 {
4367 /* memory target */
4368 IEM_MC_BEGIN(0, 1);
4369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4372 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4373 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4374 } IEM_MC_ELSE() {
4375 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4376 } IEM_MC_ENDIF();
4377 IEM_MC_ADVANCE_RIP();
4378 IEM_MC_END();
4379 }
4380 return VINF_SUCCESS;
4381}
4382
4383
4384/** Opcode 0x0f 0x97. */
4385FNIEMOP_DEF(iemOp_setnbe_Eb)
4386{
4387 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
4388 IEMOP_HLP_MIN_386();
4389 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4390
4391 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4392 * any way. AMD says it's "unused", whatever that means. We're
4393 * ignoring for now. */
4394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4395 {
4396 /* register target */
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398 IEM_MC_BEGIN(0, 0);
4399 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4400 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4401 } IEM_MC_ELSE() {
4402 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4403 } IEM_MC_ENDIF();
4404 IEM_MC_ADVANCE_RIP();
4405 IEM_MC_END();
4406 }
4407 else
4408 {
4409 /* memory target */
4410 IEM_MC_BEGIN(0, 1);
4411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4414 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
4415 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4416 } IEM_MC_ELSE() {
4417 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4418 } IEM_MC_ENDIF();
4419 IEM_MC_ADVANCE_RIP();
4420 IEM_MC_END();
4421 }
4422 return VINF_SUCCESS;
4423}
4424
4425
4426/** Opcode 0x0f 0x98. */
4427FNIEMOP_DEF(iemOp_sets_Eb)
4428{
4429 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
4430 IEMOP_HLP_MIN_386();
4431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4432
4433 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4434 * any way. AMD says it's "unused", whatever that means. We're
4435 * ignoring for now. */
4436 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4437 {
4438 /* register target */
4439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4440 IEM_MC_BEGIN(0, 0);
4441 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4442 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4443 } IEM_MC_ELSE() {
4444 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4445 } IEM_MC_ENDIF();
4446 IEM_MC_ADVANCE_RIP();
4447 IEM_MC_END();
4448 }
4449 else
4450 {
4451 /* memory target */
4452 IEM_MC_BEGIN(0, 1);
4453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4454 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4456 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4457 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4458 } IEM_MC_ELSE() {
4459 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4460 } IEM_MC_ENDIF();
4461 IEM_MC_ADVANCE_RIP();
4462 IEM_MC_END();
4463 }
4464 return VINF_SUCCESS;
4465}
4466
4467
4468/** Opcode 0x0f 0x99. */
4469FNIEMOP_DEF(iemOp_setns_Eb)
4470{
4471 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
4472 IEMOP_HLP_MIN_386();
4473 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4474
4475 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4476 * any way. AMD says it's "unused", whatever that means. We're
4477 * ignoring for now. */
4478 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4479 {
4480 /* register target */
4481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4482 IEM_MC_BEGIN(0, 0);
4483 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4484 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4485 } IEM_MC_ELSE() {
4486 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4487 } IEM_MC_ENDIF();
4488 IEM_MC_ADVANCE_RIP();
4489 IEM_MC_END();
4490 }
4491 else
4492 {
4493 /* memory target */
4494 IEM_MC_BEGIN(0, 1);
4495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4498 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
4499 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4500 } IEM_MC_ELSE() {
4501 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4502 } IEM_MC_ENDIF();
4503 IEM_MC_ADVANCE_RIP();
4504 IEM_MC_END();
4505 }
4506 return VINF_SUCCESS;
4507}
4508
4509
4510/** Opcode 0x0f 0x9a. */
4511FNIEMOP_DEF(iemOp_setp_Eb)
4512{
4513 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
4514 IEMOP_HLP_MIN_386();
4515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4516
4517 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4518 * any way. AMD says it's "unused", whatever that means. We're
4519 * ignoring for now. */
4520 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4521 {
4522 /* register target */
4523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4524 IEM_MC_BEGIN(0, 0);
4525 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4526 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4527 } IEM_MC_ELSE() {
4528 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4529 } IEM_MC_ENDIF();
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 }
4533 else
4534 {
4535 /* memory target */
4536 IEM_MC_BEGIN(0, 1);
4537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4540 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4541 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4542 } IEM_MC_ELSE() {
4543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4544 } IEM_MC_ENDIF();
4545 IEM_MC_ADVANCE_RIP();
4546 IEM_MC_END();
4547 }
4548 return VINF_SUCCESS;
4549}
4550
4551
4552/** Opcode 0x0f 0x9b. */
4553FNIEMOP_DEF(iemOp_setnp_Eb)
4554{
4555 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
4556 IEMOP_HLP_MIN_386();
4557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4558
4559 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4560 * any way. AMD says it's "unused", whatever that means. We're
4561 * ignoring for now. */
4562 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4563 {
4564 /* register target */
4565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4566 IEM_MC_BEGIN(0, 0);
4567 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4568 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4569 } IEM_MC_ELSE() {
4570 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4571 } IEM_MC_ENDIF();
4572 IEM_MC_ADVANCE_RIP();
4573 IEM_MC_END();
4574 }
4575 else
4576 {
4577 /* memory target */
4578 IEM_MC_BEGIN(0, 1);
4579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4582 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4583 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4584 } IEM_MC_ELSE() {
4585 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4586 } IEM_MC_ENDIF();
4587 IEM_MC_ADVANCE_RIP();
4588 IEM_MC_END();
4589 }
4590 return VINF_SUCCESS;
4591}
4592
4593
4594/** Opcode 0x0f 0x9c. */
4595FNIEMOP_DEF(iemOp_setl_Eb)
4596{
4597 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
4598 IEMOP_HLP_MIN_386();
4599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4600
4601 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4602 * any way. AMD says it's "unused", whatever that means. We're
4603 * ignoring for now. */
4604 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4605 {
4606 /* register target */
4607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4608 IEM_MC_BEGIN(0, 0);
4609 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4610 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4611 } IEM_MC_ELSE() {
4612 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4613 } IEM_MC_ENDIF();
4614 IEM_MC_ADVANCE_RIP();
4615 IEM_MC_END();
4616 }
4617 else
4618 {
4619 /* memory target */
4620 IEM_MC_BEGIN(0, 1);
4621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4624 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4625 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4626 } IEM_MC_ELSE() {
4627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4628 } IEM_MC_ENDIF();
4629 IEM_MC_ADVANCE_RIP();
4630 IEM_MC_END();
4631 }
4632 return VINF_SUCCESS;
4633}
4634
4635
4636/** Opcode 0x0f 0x9d. */
4637FNIEMOP_DEF(iemOp_setnl_Eb)
4638{
4639 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
4640 IEMOP_HLP_MIN_386();
4641 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4642
4643 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4644 * any way. AMD says it's "unused", whatever that means. We're
4645 * ignoring for now. */
4646 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4647 {
4648 /* register target */
4649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4650 IEM_MC_BEGIN(0, 0);
4651 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4652 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4653 } IEM_MC_ELSE() {
4654 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4655 } IEM_MC_ENDIF();
4656 IEM_MC_ADVANCE_RIP();
4657 IEM_MC_END();
4658 }
4659 else
4660 {
4661 /* memory target */
4662 IEM_MC_BEGIN(0, 1);
4663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4666 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
4667 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4668 } IEM_MC_ELSE() {
4669 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4670 } IEM_MC_ENDIF();
4671 IEM_MC_ADVANCE_RIP();
4672 IEM_MC_END();
4673 }
4674 return VINF_SUCCESS;
4675}
4676
4677
4678/** Opcode 0x0f 0x9e. */
4679FNIEMOP_DEF(iemOp_setle_Eb)
4680{
4681 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
4682 IEMOP_HLP_MIN_386();
4683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4684
4685 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4686 * any way. AMD says it's "unused", whatever that means. We're
4687 * ignoring for now. */
4688 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4689 {
4690 /* register target */
4691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4692 IEM_MC_BEGIN(0, 0);
4693 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4694 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4695 } IEM_MC_ELSE() {
4696 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4697 } IEM_MC_ENDIF();
4698 IEM_MC_ADVANCE_RIP();
4699 IEM_MC_END();
4700 }
4701 else
4702 {
4703 /* memory target */
4704 IEM_MC_BEGIN(0, 1);
4705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4708 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4709 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4710 } IEM_MC_ELSE() {
4711 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4712 } IEM_MC_ENDIF();
4713 IEM_MC_ADVANCE_RIP();
4714 IEM_MC_END();
4715 }
4716 return VINF_SUCCESS;
4717}
4718
4719
4720/** Opcode 0x0f 0x9f. */
4721FNIEMOP_DEF(iemOp_setnle_Eb)
4722{
4723 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
4724 IEMOP_HLP_MIN_386();
4725 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4726
4727 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4728 * any way. AMD says it's "unused", whatever that means. We're
4729 * ignoring for now. */
4730 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4731 {
4732 /* register target */
4733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4734 IEM_MC_BEGIN(0, 0);
4735 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4736 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0);
4737 } IEM_MC_ELSE() {
4738 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1);
4739 } IEM_MC_ENDIF();
4740 IEM_MC_ADVANCE_RIP();
4741 IEM_MC_END();
4742 }
4743 else
4744 {
4745 /* memory target */
4746 IEM_MC_BEGIN(0, 1);
4747 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4750 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
4751 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4752 } IEM_MC_ELSE() {
4753 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
4754 } IEM_MC_ENDIF();
4755 IEM_MC_ADVANCE_RIP();
4756 IEM_MC_END();
4757 }
4758 return VINF_SUCCESS;
4759}
4760
4761
4762/**
4763 * Common 'push segment-register' helper.
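 *
 * Note: the 32-bit case uses IEM_MC_PUSH_U32_SREG rather than a plain 32-bit
 *       push, since real CPUs are allowed to (and commonly do) write only the
 *       low 16 bits of the stack slot when pushing a segment register with a
 *       32-bit operand size.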
4764 */
4765FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4766{
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768 if (iReg < X86_SREG_FS)
4769 IEMOP_HLP_NO_64BIT();
4770 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4771
4772 switch (pVCpu->iem.s.enmEffOpSize)
4773 {
4774 case IEMMODE_16BIT:
4775 IEM_MC_BEGIN(0, 1);
4776 IEM_MC_LOCAL(uint16_t, u16Value);
4777 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4778 IEM_MC_PUSH_U16(u16Value);
4779 IEM_MC_ADVANCE_RIP();
4780 IEM_MC_END();
4781 break;
4782
4783 case IEMMODE_32BIT:
4784 IEM_MC_BEGIN(0, 1);
4785 IEM_MC_LOCAL(uint32_t, u32Value);
4786 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4787 IEM_MC_PUSH_U32_SREG(u32Value);
4788 IEM_MC_ADVANCE_RIP();
4789 IEM_MC_END();
4790 break;
4791
4792 case IEMMODE_64BIT:
4793 IEM_MC_BEGIN(0, 1);
4794 IEM_MC_LOCAL(uint64_t, u64Value);
4795 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4796 IEM_MC_PUSH_U64(u64Value);
4797 IEM_MC_ADVANCE_RIP();
4798 IEM_MC_END();
4799 break;
4800 }
4801
4802 return VINF_SUCCESS;
4803}
4804
4805
4806/** Opcode 0x0f 0xa0. */
4807FNIEMOP_DEF(iemOp_push_fs)
4808{
4809 IEMOP_MNEMONIC(push_fs, "push fs");
4810 IEMOP_HLP_MIN_386();
4811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4812 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
4813}
4814
4815
4816/** Opcode 0x0f 0xa1. */
4817FNIEMOP_DEF(iemOp_pop_fs)
4818{
4819 IEMOP_MNEMONIC(pop_fs, "pop fs");
4820 IEMOP_HLP_MIN_386();
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4822 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
4823}
4824
4825
4826/** Opcode 0x0f 0xa2. */
4827FNIEMOP_DEF(iemOp_cpuid)
4828{
4829 IEMOP_MNEMONIC(cpuid, "cpuid");
4830 IEMOP_HLP_MIN_486(); /* not all 486es. */
4831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4832 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
4833}
4834
4835
4836/**
4837 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4838 * iemOp_bts_Ev_Gv.
4839 */
4840FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4841{
4842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4843 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4844
4845 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4846 {
4847 /* register destination. */
4848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4849 switch (pVCpu->iem.s.enmEffOpSize)
4850 {
4851 case IEMMODE_16BIT:
4852 IEM_MC_BEGIN(3, 0);
4853 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4854 IEM_MC_ARG(uint16_t, u16Src, 1);
4855 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4856
4857 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4858 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4859 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4860 IEM_MC_REF_EFLAGS(pEFlags);
4861 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4862
4863 IEM_MC_ADVANCE_RIP();
4864 IEM_MC_END();
4865 return VINF_SUCCESS;
4866
4867 case IEMMODE_32BIT:
4868 IEM_MC_BEGIN(3, 0);
4869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4870 IEM_MC_ARG(uint32_t, u32Src, 1);
4871 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4872
4873 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4874 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4875 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4876 IEM_MC_REF_EFLAGS(pEFlags);
4877 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4878
4879 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883
4884 case IEMMODE_64BIT:
4885 IEM_MC_BEGIN(3, 0);
4886 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4887 IEM_MC_ARG(uint64_t, u64Src, 1);
4888 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4889
4890 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4891 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4892 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4893 IEM_MC_REF_EFLAGS(pEFlags);
4894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4895
4896 IEM_MC_ADVANCE_RIP();
4897 IEM_MC_END();
4898 return VINF_SUCCESS;
4899
4900 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4901 }
4902 }
4903 else
4904 {
4905 /* memory destination. */
4906
4907 uint32_t fAccess;
4908 if (pImpl->pfnLockedU16)
4909 fAccess = IEM_ACCESS_DATA_RW;
4910 else /* BT */
4911 fAccess = IEM_ACCESS_DATA_R;
4912
4913 /** @todo test negative bit offsets! */
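        /* The bit offset in Gv is signed: the low bits select the bit within
           the operand, while the rest, arithmetically shifted down and scaled
           to bytes, adjust the effective address.  E.g. for a 16-bit
           'bt [mem],reg' with reg=0xfff3 (-13), the adjustment below is
           (-13 >> 4) * 2 = -2 bytes and the bit tested is -13 & 15 = 3. */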
4914 switch (pVCpu->iem.s.enmEffOpSize)
4915 {
4916 case IEMMODE_16BIT:
4917 IEM_MC_BEGIN(3, 2);
4918 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4919 IEM_MC_ARG(uint16_t, u16Src, 1);
4920 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4922 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4923
4924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4925 if (pImpl->pfnLockedU16)
4926 IEMOP_HLP_DONE_DECODING();
4927 else
4928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4929 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4930 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4931 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4932 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4933 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1);
4934 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4935 IEM_MC_FETCH_EFLAGS(EFlags);
4936
4937 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4938 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4939 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4940 else
4941 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4942 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
4943
4944 IEM_MC_COMMIT_EFLAGS(EFlags);
4945 IEM_MC_ADVANCE_RIP();
4946 IEM_MC_END();
4947 return VINF_SUCCESS;
4948
4949 case IEMMODE_32BIT:
4950 IEM_MC_BEGIN(3, 2);
4951 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4952 IEM_MC_ARG(uint32_t, u32Src, 1);
4953 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4955 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4956
4957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4958 if (pImpl->pfnLockedU16)
4959 IEMOP_HLP_DONE_DECODING();
4960 else
4961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4962 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4963 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4964 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4965 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4966 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4967 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4968 IEM_MC_FETCH_EFLAGS(EFlags);
4969
4970 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
4971 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
4972 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4973 else
4974 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4975 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
4976
4977 IEM_MC_COMMIT_EFLAGS(EFlags);
4978 IEM_MC_ADVANCE_RIP();
4979 IEM_MC_END();
4980 return VINF_SUCCESS;
4981
4982 case IEMMODE_64BIT:
4983 IEM_MC_BEGIN(3, 2);
4984 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4985 IEM_MC_ARG(uint64_t, u64Src, 1);
4986 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4988 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4989
4990 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4991 if (pImpl->pfnLockedU16)
4992 IEMOP_HLP_DONE_DECODING();
4993 else
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
4996 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4997 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4998 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4999 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
5000 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
5001 IEM_MC_FETCH_EFLAGS(EFlags);
5002
5003 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5004 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5005 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5006 else
5007 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5008 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5009
5010 IEM_MC_COMMIT_EFLAGS(EFlags);
5011 IEM_MC_ADVANCE_RIP();
5012 IEM_MC_END();
5013 return VINF_SUCCESS;
5014
5015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5016 }
5017 }
5018}
5019
5020
5021/** Opcode 0x0f 0xa3. */
5022FNIEMOP_DEF(iemOp_bt_Ev_Gv)
5023{
5024 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
5025 IEMOP_HLP_MIN_386();
5026 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
5027}
5028
5029
5030/**
5031 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
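 *
 * shld shifts the destination left, filling in from the most significant bits
 * of the source; shrd shifts right, filling in from the least significant
 * ones.  Count masking and the AF/OF details are left to the assembly workers
 * (g_iemAImpl_shld / g_iemAImpl_shrd).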
5032 */
5033FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
5034{
5035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5036 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5037
5038 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5039 {
5040 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5042
5043 switch (pVCpu->iem.s.enmEffOpSize)
5044 {
5045 case IEMMODE_16BIT:
5046 IEM_MC_BEGIN(4, 0);
5047 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5048 IEM_MC_ARG(uint16_t, u16Src, 1);
5049 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5050 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5051
5052 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5053 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5054 IEM_MC_REF_EFLAGS(pEFlags);
5055 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5056
5057 IEM_MC_ADVANCE_RIP();
5058 IEM_MC_END();
5059 return VINF_SUCCESS;
5060
5061 case IEMMODE_32BIT:
5062 IEM_MC_BEGIN(4, 0);
5063 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5064 IEM_MC_ARG(uint32_t, u32Src, 1);
5065 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5066 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5067
5068 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5069 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5070 IEM_MC_REF_EFLAGS(pEFlags);
5071 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5072
5073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5074 IEM_MC_ADVANCE_RIP();
5075 IEM_MC_END();
5076 return VINF_SUCCESS;
5077
5078 case IEMMODE_64BIT:
5079 IEM_MC_BEGIN(4, 0);
5080 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5081 IEM_MC_ARG(uint64_t, u64Src, 1);
5082 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
5083 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5084
5085 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5086 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5087 IEM_MC_REF_EFLAGS(pEFlags);
5088 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5089
5090 IEM_MC_ADVANCE_RIP();
5091 IEM_MC_END();
5092 return VINF_SUCCESS;
5093
5094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5095 }
5096 }
5097 else
5098 {
5099 switch (pVCpu->iem.s.enmEffOpSize)
5100 {
5101 case IEMMODE_16BIT:
5102 IEM_MC_BEGIN(4, 2);
5103 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5104 IEM_MC_ARG(uint16_t, u16Src, 1);
5105 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5106 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5108
5109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5110 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5111 IEM_MC_ASSIGN(cShiftArg, cShift);
5112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5113 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5114 IEM_MC_FETCH_EFLAGS(EFlags);
5115 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5116 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5117
5118 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5119 IEM_MC_COMMIT_EFLAGS(EFlags);
5120 IEM_MC_ADVANCE_RIP();
5121 IEM_MC_END();
5122 return VINF_SUCCESS;
5123
5124 case IEMMODE_32BIT:
5125 IEM_MC_BEGIN(4, 2);
5126 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5127 IEM_MC_ARG(uint32_t, u32Src, 1);
5128 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5129 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5131
5132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5133 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5134 IEM_MC_ASSIGN(cShiftArg, cShift);
5135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5136 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5137 IEM_MC_FETCH_EFLAGS(EFlags);
5138 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5139 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5140
5141 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5142 IEM_MC_COMMIT_EFLAGS(EFlags);
5143 IEM_MC_ADVANCE_RIP();
5144 IEM_MC_END();
5145 return VINF_SUCCESS;
5146
5147 case IEMMODE_64BIT:
5148 IEM_MC_BEGIN(4, 2);
5149 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5150 IEM_MC_ARG(uint64_t, u64Src, 1);
5151 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5152 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5154
5155 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5156 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5157 IEM_MC_ASSIGN(cShiftArg, cShift);
5158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5159 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5160 IEM_MC_FETCH_EFLAGS(EFlags);
5161 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5162 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5163
5164 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5165 IEM_MC_COMMIT_EFLAGS(EFlags);
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 return VINF_SUCCESS;
5169
5170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5171 }
5172 }
5173}
5174
5175
5176/**
5177 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
5178 */
5179FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
5180{
5181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5182 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
5183
5184 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5185 {
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187
5188 switch (pVCpu->iem.s.enmEffOpSize)
5189 {
5190 case IEMMODE_16BIT:
5191 IEM_MC_BEGIN(4, 0);
5192 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5193 IEM_MC_ARG(uint16_t, u16Src, 1);
5194 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5195 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5196
5197 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5198 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5199 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5200 IEM_MC_REF_EFLAGS(pEFlags);
5201 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5202
5203 IEM_MC_ADVANCE_RIP();
5204 IEM_MC_END();
5205 return VINF_SUCCESS;
5206
5207 case IEMMODE_32BIT:
5208 IEM_MC_BEGIN(4, 0);
5209 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5210 IEM_MC_ARG(uint32_t, u32Src, 1);
5211 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5212 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5213
5214 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5215 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5216 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5217 IEM_MC_REF_EFLAGS(pEFlags);
5218 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5219
5220 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5221 IEM_MC_ADVANCE_RIP();
5222 IEM_MC_END();
5223 return VINF_SUCCESS;
5224
5225 case IEMMODE_64BIT:
5226 IEM_MC_BEGIN(4, 0);
5227 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5228 IEM_MC_ARG(uint64_t, u64Src, 1);
5229 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5230 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5231
5232 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5233 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5234 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5235 IEM_MC_REF_EFLAGS(pEFlags);
5236 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5237
5238 IEM_MC_ADVANCE_RIP();
5239 IEM_MC_END();
5240 return VINF_SUCCESS;
5241
5242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5243 }
5244 }
5245 else
5246 {
5247 switch (pVCpu->iem.s.enmEffOpSize)
5248 {
5249 case IEMMODE_16BIT:
5250 IEM_MC_BEGIN(4, 2);
5251 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5252 IEM_MC_ARG(uint16_t, u16Src, 1);
5253 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5256
5257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5259 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5260 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5261 IEM_MC_FETCH_EFLAGS(EFlags);
5262 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5263 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
5264
5265 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5266 IEM_MC_COMMIT_EFLAGS(EFlags);
5267 IEM_MC_ADVANCE_RIP();
5268 IEM_MC_END();
5269 return VINF_SUCCESS;
5270
5271 case IEMMODE_32BIT:
5272 IEM_MC_BEGIN(4, 2);
5273 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5274 IEM_MC_ARG(uint32_t, u32Src, 1);
5275 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5276 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5278
5279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5281 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5282 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5283 IEM_MC_FETCH_EFLAGS(EFlags);
5284 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5285 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
5286
5287 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5288 IEM_MC_COMMIT_EFLAGS(EFlags);
5289 IEM_MC_ADVANCE_RIP();
5290 IEM_MC_END();
5291 return VINF_SUCCESS;
5292
5293 case IEMMODE_64BIT:
5294 IEM_MC_BEGIN(4, 2);
5295 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5296 IEM_MC_ARG(uint64_t, u64Src, 1);
5297 IEM_MC_ARG(uint8_t, cShiftArg, 2);
5298 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5300
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5303 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5304 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
5305 IEM_MC_FETCH_EFLAGS(EFlags);
5306 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5307 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
5308
5309 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5310 IEM_MC_COMMIT_EFLAGS(EFlags);
5311 IEM_MC_ADVANCE_RIP();
5312 IEM_MC_END();
5313 return VINF_SUCCESS;
5314
5315 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5316 }
5317 }
5318}
5319
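/*
 * Note! The common worker ending above implements the double precision
 * shifts with the count taken from CL.  A minimal sketch of the 32-bit
 * SHLD data path (illustrative helper, not part of IEM; the count
 * masking to 0..31 matches the architectural modulo behaviour):
 */
#if 0 /* illustrative only */
static uint32_t ExampleShldU32(uint32_t uDst, uint32_t uSrc, uint8_t cShift)
{
    cShift &= 31;                       /* the shift count is taken modulo 32 */
    if (!cShift)
        return uDst;                    /* a zero count leaves everything untouched */
    return (uDst << cShift)             /* high bits of the destination move up... */
         | (uSrc >> (32 - cShift));     /* ...and are filled from the source's top bits */
}
#endif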
5320
5321
5322/** Opcode 0x0f 0xa4. */
5323FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
5324{
5325 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
5326 IEMOP_HLP_MIN_386();
5327 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
5328}
5329
5330
5331/** Opcode 0x0f 0xa5. */
5332FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
5333{
5334 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
5335 IEMOP_HLP_MIN_386();
5336 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
5337}
5338
5339
5340/** Opcode 0x0f 0xa8. */
5341FNIEMOP_DEF(iemOp_push_gs)
5342{
5343 IEMOP_MNEMONIC(push_gs, "push gs");
5344 IEMOP_HLP_MIN_386();
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5346 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
5347}
5348
5349
5350/** Opcode 0x0f 0xa9. */
5351FNIEMOP_DEF(iemOp_pop_gs)
5352{
5353 IEMOP_MNEMONIC(pop_gs, "pop gs");
5354 IEMOP_HLP_MIN_386();
5355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5356 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
5357}
5358
5359
5360/** Opcode 0x0f 0xaa. */
5361FNIEMOP_STUB(iemOp_rsm);
5362//IEMOP_HLP_MIN_386();
5363
5364
5365/** Opcode 0x0f 0xab. */
5366FNIEMOP_DEF(iemOp_bts_Ev_Gv)
5367{
5368 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
5369 IEMOP_HLP_MIN_386();
5370 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
5371}
5372
5373
5374/** Opcode 0x0f 0xac. */
5375FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
5376{
5377 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
5378 IEMOP_HLP_MIN_386();
5379 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
5380}
5381
5382
5383/** Opcode 0x0f 0xad. */
5384FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
5385{
5386 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
5387 IEMOP_HLP_MIN_386();
5388 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
5389}
5390
5391
5392/** Opcode 0x0f 0xae mem/0. */
5393FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
5394{
5395 IEMOP_MNEMONIC(fxsave, "fxsave m512");
5396 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5397 return IEMOP_RAISE_INVALID_OPCODE();
5398
5399 IEM_MC_BEGIN(3, 1);
5400 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5401 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5402 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5405 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5406 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
5407 IEM_MC_END();
5408 return VINF_SUCCESS;
5409}
5410
5411
5412/** Opcode 0x0f 0xae mem/1. */
5413FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
5414{
5415 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
5416 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
5417 return IEMOP_RAISE_INVALID_OPCODE();
5418
5419 IEM_MC_BEGIN(3, 1);
5420 IEM_MC_ARG(uint8_t, iEffSeg, 0);
5421 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5422 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
5423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5425 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
5426 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
5427 IEM_MC_END();
5428 return VINF_SUCCESS;
5429}
5430
5431
5432/** Opcode 0x0f 0xae mem/2. */
5433FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);
5434
5435/** Opcode 0x0f 0xae mem/3. */
5436FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);
5437
5438/** Opcode 0x0f 0xae mem/4. */
5439FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);
5440
5441/** Opcode 0x0f 0xae mem/5. */
5442FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);
5443
5444/** Opcode 0x0f 0xae mem/6. */
5445FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
5446
5447/** Opcode 0x0f 0xae mem/7. */
5448FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
5449
5450
5451/** Opcode 0x0f 0xae 11b/5. */
5452FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
5453{
5454 RT_NOREF_PV(bRm);
5455 IEMOP_MNEMONIC(lfence, "lfence");
5456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5457 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5458 return IEMOP_RAISE_INVALID_OPCODE();
5459
5460 IEM_MC_BEGIN(0, 0);
5461 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5462 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
5463 else
5464 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5465 IEM_MC_ADVANCE_RIP();
5466 IEM_MC_END();
5467 return VINF_SUCCESS;
5468}
5469
5470
5471/** Opcode 0x0f 0xae 11b/6. */
5472FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
5473{
5474 RT_NOREF_PV(bRm);
5475 IEMOP_MNEMONIC(mfence, "mfence");
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5478 return IEMOP_RAISE_INVALID_OPCODE();
5479
5480 IEM_MC_BEGIN(0, 0);
5481 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5482 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
5483 else
5484 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5485 IEM_MC_ADVANCE_RIP();
5486 IEM_MC_END();
5487 return VINF_SUCCESS;
5488}
5489
5490
5491/** Opcode 0x0f 0xae 11b/7. */
5492FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
5493{
5494 RT_NOREF_PV(bRm);
5495 IEMOP_MNEMONIC(sfence, "sfence");
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5497 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
5498 return IEMOP_RAISE_INVALID_OPCODE();
5499
5500 IEM_MC_BEGIN(0, 0);
5501 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
5502 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
5503 else
5504 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
5505 IEM_MC_ADVANCE_RIP();
5506 IEM_MC_END();
5507 return VINF_SUCCESS;
5508}
5509
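/*
 * Note! On hosts without SSE2 the three fence encodings above all fall back
 * to iemAImpl_alt_mem_fence.  The classic substitute for a full fence on
 * such CPUs is a LOCKed no-op read-modify-write of a stack location; a
 * minimal sketch (illustrative only, assuming a 32-bit x86 host and
 * GCC-style extended inline assembly):
 */
#if 0 /* illustrative only */
static void ExampleAltMemFence(void)
{
    /* The LOCK prefix serializes all prior loads and stores. */
    __asm__ __volatile__("lock; orl $0, (%%esp)" ::: "memory", "cc");
}
#endif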
5510
5511/** Opcode 0xf3 0x0f 0xae 11b/0. */
5512FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);
5513
5514/** Opcode 0xf3 0x0f 0xae 11b/1. */
5515FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);
5516
5517/** Opcode 0xf3 0x0f 0xae 11b/2. */
5518FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);
5519
5520/** Opcode 0xf3 0x0f 0xae 11b/3. */
5521FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5522
5523
5524/** Opcode 0x0f 0xae. */
5525FNIEMOP_DEF(iemOp_Grp15)
5526{
5527 IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor needed, but useful for debugging 286 code. */
5528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5529 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5530 {
5531 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5532 {
5533 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
5534 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
5535 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
5536 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
5537 case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
5538 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
5539 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
5540 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
5541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5542 }
5543 }
5544 else
5545 {
5546 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
5547 {
5548 case 0:
5549 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5550 {
5551 case 0: return IEMOP_RAISE_INVALID_OPCODE();
5552 case 1: return IEMOP_RAISE_INVALID_OPCODE();
5553 case 2: return IEMOP_RAISE_INVALID_OPCODE();
5554 case 3: return IEMOP_RAISE_INVALID_OPCODE();
5555 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5556 case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
5557 case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
5558 case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
5559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5560 }
5561 break;
5562
5563 case IEM_OP_PRF_REPZ:
5564 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5565 {
5566 case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
5567 case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
5568 case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
5569 case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
5570 case 4: return IEMOP_RAISE_INVALID_OPCODE();
5571 case 5: return IEMOP_RAISE_INVALID_OPCODE();
5572 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5573 case 7: return IEMOP_RAISE_INVALID_OPCODE();
5574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5575 }
5576 break;
5577
5578 default:
5579 return IEMOP_RAISE_INVALID_OPCODE();
5580 }
5581 }
5582}
5583
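/*
 * Note! The group dispatcher above selects the handler on the ModRM reg
 * field, while the mod field distinguishes the memory forms from the
 * register (11b) forms.  A minimal sketch of the field extraction
 * (illustrative helper; mask/shift values follow the x86 ModRM layout
 * mod[7:6] reg[5:3] rm[2:0]):
 */
#if 0 /* illustrative only */
static void ExampleDecodeModRm(uint8_t bRm, uint8_t *pMod, uint8_t *pReg, uint8_t *pRm)
{
    *pMod = (bRm >> 6) & 3;     /* 3 = register operand, else a memory addressing form */
    *pReg = (bRm >> 3) & 7;     /* opcode extension for group instructions */
    *pRm  =  bRm       & 7;     /* register index or addressing mode selector */
}
#endif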
5584
5585/** Opcode 0x0f 0xaf. */
5586FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5587{
5588 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
5589 IEMOP_HLP_MIN_386();
5590 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5591 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5592}
5593
5594
5595/** Opcode 0x0f 0xb0. */
5596FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5597{
5598 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
5599 IEMOP_HLP_MIN_486();
5600 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5601
5602 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5603 {
5604 IEMOP_HLP_DONE_DECODING();
5605 IEM_MC_BEGIN(4, 0);
5606 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5607 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5608 IEM_MC_ARG(uint8_t, u8Src, 2);
5609 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5610
5611 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5612 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5613 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5614 IEM_MC_REF_EFLAGS(pEFlags);
5615 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5616 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5617 else
5618 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5619
5620 IEM_MC_ADVANCE_RIP();
5621 IEM_MC_END();
5622 }
5623 else
5624 {
5625 IEM_MC_BEGIN(4, 3);
5626 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5627 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5628 IEM_MC_ARG(uint8_t, u8Src, 2);
5629 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5631 IEM_MC_LOCAL(uint8_t, u8Al);
5632
5633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5634 IEMOP_HLP_DONE_DECODING();
5635 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5636 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5637 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5638 IEM_MC_FETCH_EFLAGS(EFlags);
5639 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5640 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5641 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5642 else
5643 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5644
5645 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5646 IEM_MC_COMMIT_EFLAGS(EFlags);
5647 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5648 IEM_MC_ADVANCE_RIP();
5649 IEM_MC_END();
5650 }
5651 return VINF_SUCCESS;
5652}
5653
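/*
 * Note! CMPXCHG compares the accumulator with the destination; on a match
 * it stores the source operand and sets ZF, otherwise it loads the
 * destination into the accumulator and clears ZF.  A minimal sketch of the
 * byte variant (illustrative helper, ignoring the LOCK/atomicity aspect
 * handled by the locked workers above):
 */
#if 0 /* illustrative only */
static int ExampleCmpXchgU8(uint8_t *puDst, uint8_t *puAl, uint8_t uSrc)
{
    if (*puDst == *puAl)
    {
        *puDst = uSrc;      /* equal: write the source operand, ZF=1 */
        return 1;
    }
    *puAl = *puDst;         /* not equal: load the destination into AL, ZF=0 */
    return 0;
}
#endif
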
5654/** Opcode 0x0f 0xb1. */
5655FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5656{
5657 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
5658 IEMOP_HLP_MIN_486();
5659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5660
5661 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5662 {
5663 IEMOP_HLP_DONE_DECODING();
5664 switch (pVCpu->iem.s.enmEffOpSize)
5665 {
5666 case IEMMODE_16BIT:
5667 IEM_MC_BEGIN(4, 0);
5668 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5669 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5670 IEM_MC_ARG(uint16_t, u16Src, 2);
5671 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5672
5673 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5674 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5675 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5676 IEM_MC_REF_EFLAGS(pEFlags);
5677 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5678 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5679 else
5680 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5681
5682 IEM_MC_ADVANCE_RIP();
5683 IEM_MC_END();
5684 return VINF_SUCCESS;
5685
5686 case IEMMODE_32BIT:
5687 IEM_MC_BEGIN(4, 0);
5688 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5689 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5690 IEM_MC_ARG(uint32_t, u32Src, 2);
5691 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5692
5693 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5694 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5695 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5696 IEM_MC_REF_EFLAGS(pEFlags);
5697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5698 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5699 else
5700 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5701
5702 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5703 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5704 IEM_MC_ADVANCE_RIP();
5705 IEM_MC_END();
5706 return VINF_SUCCESS;
5707
5708 case IEMMODE_64BIT:
5709 IEM_MC_BEGIN(4, 0);
5710 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5711 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5712#ifdef RT_ARCH_X86
5713 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5714#else
5715 IEM_MC_ARG(uint64_t, u64Src, 2);
5716#endif
5717 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5718
5719 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5720 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5721 IEM_MC_REF_EFLAGS(pEFlags);
5722#ifdef RT_ARCH_X86
5723 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5724 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5725 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5726 else
5727 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5728#else
5729 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5730 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5731 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5732 else
5733 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5734#endif
5735
5736 IEM_MC_ADVANCE_RIP();
5737 IEM_MC_END();
5738 return VINF_SUCCESS;
5739
5740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5741 }
5742 }
5743 else
5744 {
5745 switch (pVCpu->iem.s.enmEffOpSize)
5746 {
5747 case IEMMODE_16BIT:
5748 IEM_MC_BEGIN(4, 3);
5749 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5750 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5751 IEM_MC_ARG(uint16_t, u16Src, 2);
5752 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5754 IEM_MC_LOCAL(uint16_t, u16Ax);
5755
5756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5757 IEMOP_HLP_DONE_DECODING();
5758 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5759 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5760 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5761 IEM_MC_FETCH_EFLAGS(EFlags);
5762 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5763 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5764 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5765 else
5766 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5767
5768 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5769 IEM_MC_COMMIT_EFLAGS(EFlags);
5770 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5771 IEM_MC_ADVANCE_RIP();
5772 IEM_MC_END();
5773 return VINF_SUCCESS;
5774
5775 case IEMMODE_32BIT:
5776 IEM_MC_BEGIN(4, 3);
5777 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5778 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5779 IEM_MC_ARG(uint32_t, u32Src, 2);
5780 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5782 IEM_MC_LOCAL(uint32_t, u32Eax);
5783
5784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5785 IEMOP_HLP_DONE_DECODING();
5786 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5787 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5788 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5789 IEM_MC_FETCH_EFLAGS(EFlags);
5790 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5791 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5792 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5793 else
5794 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5795
5796 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5797 IEM_MC_COMMIT_EFLAGS(EFlags);
5798 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5799 IEM_MC_ADVANCE_RIP();
5800 IEM_MC_END();
5801 return VINF_SUCCESS;
5802
5803 case IEMMODE_64BIT:
5804 IEM_MC_BEGIN(4, 3);
5805 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5806 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5807#ifdef RT_ARCH_X86
5808 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5809#else
5810 IEM_MC_ARG(uint64_t, u64Src, 2);
5811#endif
5812 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5814 IEM_MC_LOCAL(uint64_t, u64Rax);
5815
5816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5817 IEMOP_HLP_DONE_DECODING();
5818 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
5819 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5820 IEM_MC_FETCH_EFLAGS(EFlags);
5821 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5822#ifdef RT_ARCH_X86
5823 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5824 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5825 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5826 else
5827 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5828#else
5829 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
5830 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
5831 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5832 else
5833 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5834#endif
5835
5836 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5837 IEM_MC_COMMIT_EFLAGS(EFlags);
5838 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5839 IEM_MC_ADVANCE_RIP();
5840 IEM_MC_END();
5841 return VINF_SUCCESS;
5842
5843 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5844 }
5845 }
5846}
5847
5848
5849FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5850{
5851 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5852 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg;
5853
5854 switch (pVCpu->iem.s.enmEffOpSize)
5855 {
5856 case IEMMODE_16BIT:
5857 IEM_MC_BEGIN(5, 1);
5858 IEM_MC_ARG(uint16_t, uSel, 0);
5859 IEM_MC_ARG(uint16_t, offSeg, 1);
5860 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5861 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5862 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5863 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5866 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5867 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2);
5868 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5869 IEM_MC_END();
5870 return VINF_SUCCESS;
5871
5872 case IEMMODE_32BIT:
5873 IEM_MC_BEGIN(5, 1);
5874 IEM_MC_ARG(uint16_t, uSel, 0);
5875 IEM_MC_ARG(uint32_t, offSeg, 1);
5876 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5877 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5878 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5879 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5882 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5883 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4);
5884 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5885 IEM_MC_END();
5886 return VINF_SUCCESS;
5887
5888 case IEMMODE_64BIT:
5889 IEM_MC_BEGIN(5, 1);
5890 IEM_MC_ARG(uint16_t, uSel, 0);
5891 IEM_MC_ARG(uint64_t, offSeg, 1);
5892 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5893 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5894 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4);
5895 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5898 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the AMD manual claims it only loads a 32-bit greg. */
5899 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5900 else
5901 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff);
5902 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8);
5903 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5904 IEM_MC_END();
5905 return VINF_SUCCESS;
5906
5907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5908 }
5909}
5910
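/*
 * Note! The far pointer operand loaded by the worker above is laid out in
 * memory as the offset followed by the 16-bit selector, which is why the
 * selector displacement is 2, 4 or 8 bytes depending on the operand size.
 * A minimal sketch of the 32-bit case (illustrative helper; assumes a
 * little-endian flat buffer and <string.h>):
 */
#if 0 /* illustrative only */
static void ExampleReadFarPtr32(const uint8_t *pbMem, uint32_t *puOffset, uint16_t *puSel)
{
    memcpy(puOffset, pbMem, 4);     /* bytes 0..3: the offset */
    memcpy(puSel, pbMem + 4, 2);    /* bytes 4..5: the segment selector */
}
#endif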
5911
5912/** Opcode 0x0f 0xb2. */
5913FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5914{
5915 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
5916 IEMOP_HLP_MIN_386();
5917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5918 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5919 return IEMOP_RAISE_INVALID_OPCODE();
5920 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5921}
5922
5923
5924/** Opcode 0x0f 0xb3. */
5925FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5926{
5927 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
5928 IEMOP_HLP_MIN_386();
5929 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5930}
5931
5932
5933/** Opcode 0x0f 0xb4. */
5934FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5935{
5936 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
5937 IEMOP_HLP_MIN_386();
5938 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5939 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5940 return IEMOP_RAISE_INVALID_OPCODE();
5941 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5942}
5943
5944
5945/** Opcode 0x0f 0xb5. */
5946FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5947{
5948 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
5949 IEMOP_HLP_MIN_386();
5950 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5951 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5952 return IEMOP_RAISE_INVALID_OPCODE();
5953 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5954}
5955
5956
5957/** Opcode 0x0f 0xb6. */
5958FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5959{
5960 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
5961 IEMOP_HLP_MIN_386();
5962
5963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5964
5965 /*
5966 * If rm is denoting a register, no more instruction bytes.
5967 */
5968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5969 {
5970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5971 switch (pVCpu->iem.s.enmEffOpSize)
5972 {
5973 case IEMMODE_16BIT:
5974 IEM_MC_BEGIN(0, 1);
5975 IEM_MC_LOCAL(uint16_t, u16Value);
5976 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5977 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
5978 IEM_MC_ADVANCE_RIP();
5979 IEM_MC_END();
5980 return VINF_SUCCESS;
5981
5982 case IEMMODE_32BIT:
5983 IEM_MC_BEGIN(0, 1);
5984 IEM_MC_LOCAL(uint32_t, u32Value);
5985 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5986 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
5987 IEM_MC_ADVANCE_RIP();
5988 IEM_MC_END();
5989 return VINF_SUCCESS;
5990
5991 case IEMMODE_64BIT:
5992 IEM_MC_BEGIN(0, 1);
5993 IEM_MC_LOCAL(uint64_t, u64Value);
5994 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5995 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
5996 IEM_MC_ADVANCE_RIP();
5997 IEM_MC_END();
5998 return VINF_SUCCESS;
5999
6000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6001 }
6002 }
6003 else
6004 {
6005 /*
6006 * We're loading a register from memory.
6007 */
6008 switch (pVCpu->iem.s.enmEffOpSize)
6009 {
6010 case IEMMODE_16BIT:
6011 IEM_MC_BEGIN(0, 2);
6012 IEM_MC_LOCAL(uint16_t, u16Value);
6013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6014 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6015 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6016 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6017 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6018 IEM_MC_ADVANCE_RIP();
6019 IEM_MC_END();
6020 return VINF_SUCCESS;
6021
6022 case IEMMODE_32BIT:
6023 IEM_MC_BEGIN(0, 2);
6024 IEM_MC_LOCAL(uint32_t, u32Value);
6025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6028 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6029 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6030 IEM_MC_ADVANCE_RIP();
6031 IEM_MC_END();
6032 return VINF_SUCCESS;
6033
6034 case IEMMODE_64BIT:
6035 IEM_MC_BEGIN(0, 2);
6036 IEM_MC_LOCAL(uint64_t, u64Value);
6037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6040 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6041 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6042 IEM_MC_ADVANCE_RIP();
6043 IEM_MC_END();
6044 return VINF_SUCCESS;
6045
6046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6047 }
6048 }
6049}
6050
6051
6052/** Opcode 0x0f 0xb7. */
6053FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
6054{
6055 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
6056 IEMOP_HLP_MIN_386();
6057
6058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6059
6060 /** @todo Not entirely sure how the operand size prefix is handled here,
6061 * assuming that it will be ignored. Would be nice to have a few
6062 * tests for this. */
6063 /*
6064 * If rm is denoting a register, no more instruction bytes.
6065 */
6066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6067 {
6068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6069 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6070 {
6071 IEM_MC_BEGIN(0, 1);
6072 IEM_MC_LOCAL(uint32_t, u32Value);
6073 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6074 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6075 IEM_MC_ADVANCE_RIP();
6076 IEM_MC_END();
6077 }
6078 else
6079 {
6080 IEM_MC_BEGIN(0, 1);
6081 IEM_MC_LOCAL(uint64_t, u64Value);
6082 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6083 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6084 IEM_MC_ADVANCE_RIP();
6085 IEM_MC_END();
6086 }
6087 }
6088 else
6089 {
6090 /*
6091 * We're loading a register from memory.
6092 */
6093 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6094 {
6095 IEM_MC_BEGIN(0, 2);
6096 IEM_MC_LOCAL(uint32_t, u32Value);
6097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6100 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6101 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6102 IEM_MC_ADVANCE_RIP();
6103 IEM_MC_END();
6104 }
6105 else
6106 {
6107 IEM_MC_BEGIN(0, 2);
6108 IEM_MC_LOCAL(uint64_t, u64Value);
6109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6111 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6112 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6113 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6114 IEM_MC_ADVANCE_RIP();
6115 IEM_MC_END();
6116 }
6117 }
6118 return VINF_SUCCESS;
6119}
6120
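/*
 * Note! MOVZX simply widens the operand with zero bits, and like any 32-bit
 * register write in long mode the store implicitly clears bits 63:32 of the
 * destination.  A minimal sketch of the Ew->Gv widening (illustrative
 * helper):
 */
#if 0 /* illustrative only */
static uint64_t ExampleMovzxU16(uint16_t uSrc, int fOpSize64)
{
    if (fOpSize64)
        return (uint64_t)uSrc;  /* zero extended all the way to 64 bits */
    return (uint32_t)uSrc;      /* 32-bit result; the upper half is implicitly zeroed */
}
#endif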
6121
6122/** Opcode 0x0f 0xb8. */
6123FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
6124
6125
6126/** Opcode 0x0f 0xb9. */
6127FNIEMOP_DEF(iemOp_Grp10)
6128{
6129 Log(("iemOp_Grp10 -> #UD\n"));
6130 return IEMOP_RAISE_INVALID_OPCODE();
6131}
6132
6133
6134/** Opcode 0x0f 0xba. */
6135FNIEMOP_DEF(iemOp_Grp8)
6136{
6137 IEMOP_HLP_MIN_386();
6138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6139 PCIEMOPBINSIZES pImpl;
6140 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6141 {
6142 case 0: case 1: case 2: case 3:
6143 return IEMOP_RAISE_INVALID_OPCODE();
6144 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break;
6145 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break;
6146 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break;
6147 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break;
6148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6149 }
6150 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
6151
6152 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6153 {
6154 /* register destination. */
6155 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6157
6158 switch (pVCpu->iem.s.enmEffOpSize)
6159 {
6160 case IEMMODE_16BIT:
6161 IEM_MC_BEGIN(3, 0);
6162 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6163 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
6164 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6165
6166 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6167 IEM_MC_REF_EFLAGS(pEFlags);
6168 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6169
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173
6174 case IEMMODE_32BIT:
6175 IEM_MC_BEGIN(3, 0);
6176 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6177 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
6178 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6179
6180 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6181 IEM_MC_REF_EFLAGS(pEFlags);
6182 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6183
6184 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6185 IEM_MC_ADVANCE_RIP();
6186 IEM_MC_END();
6187 return VINF_SUCCESS;
6188
6189 case IEMMODE_64BIT:
6190 IEM_MC_BEGIN(3, 0);
6191 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6192 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
6193 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6194
6195 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6196 IEM_MC_REF_EFLAGS(pEFlags);
6197 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6198
6199 IEM_MC_ADVANCE_RIP();
6200 IEM_MC_END();
6201 return VINF_SUCCESS;
6202
6203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6204 }
6205 }
6206 else
6207 {
6208 /* memory destination. */
6209
6210 uint32_t fAccess;
6211 if (pImpl->pfnLockedU16)
6212 fAccess = IEM_ACCESS_DATA_RW;
6213 else /* BT */
6214 fAccess = IEM_ACCESS_DATA_R;
6215
6216 /** @todo test negative bit offsets! */
6217 switch (pVCpu->iem.s.enmEffOpSize)
6218 {
6219 case IEMMODE_16BIT:
6220 IEM_MC_BEGIN(3, 1);
6221 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6222 IEM_MC_ARG(uint16_t, u16Src, 1);
6223 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6225
6226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6227 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6228 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
6229 if (pImpl->pfnLockedU16)
6230 IEMOP_HLP_DONE_DECODING();
6231 else
6232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6233 IEM_MC_FETCH_EFLAGS(EFlags);
6234 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6235 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6236 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
6237 else
6238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
6239 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
6240
6241 IEM_MC_COMMIT_EFLAGS(EFlags);
6242 IEM_MC_ADVANCE_RIP();
6243 IEM_MC_END();
6244 return VINF_SUCCESS;
6245
6246 case IEMMODE_32BIT:
6247 IEM_MC_BEGIN(3, 1);
6248 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6249 IEM_MC_ARG(uint32_t, u32Src, 1);
6250 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6251 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6252
6253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6254 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6255 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
6256 if (pImpl->pfnLockedU16)
6257 IEMOP_HLP_DONE_DECODING();
6258 else
6259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6260 IEM_MC_FETCH_EFLAGS(EFlags);
6261 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6262 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6263 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
6264 else
6265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
6266 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
6267
6268 IEM_MC_COMMIT_EFLAGS(EFlags);
6269 IEM_MC_ADVANCE_RIP();
6270 IEM_MC_END();
6271 return VINF_SUCCESS;
6272
6273 case IEMMODE_64BIT:
6274 IEM_MC_BEGIN(3, 1);
6275 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6276 IEM_MC_ARG(uint64_t, u64Src, 1);
6277 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
6278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6279
6280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6281 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
6282 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
6283 if (pImpl->pfnLockedU16)
6284 IEMOP_HLP_DONE_DECODING();
6285 else
6286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6287 IEM_MC_FETCH_EFLAGS(EFlags);
6288 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
6289 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6290 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
6291 else
6292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
6293 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
6294
6295 IEM_MC_COMMIT_EFLAGS(EFlags);
6296 IEM_MC_ADVANCE_RIP();
6297 IEM_MC_END();
6298 return VINF_SUCCESS;
6299
6300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6301 }
6302 }
6303
6304}
6305
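/*
 * Note! All four group 8 members first copy the selected bit into CF; they
 * differ only in how the bit is written back (BT not at all, BTS sets it,
 * BTR clears it, BTC flips it).  A minimal sketch of the 32-bit register
 * form of BTS (illustrative helper; the immediate bit offset is masked to
 * 0..31 just as the decoder above does):
 */
#if 0 /* illustrative only */
static int ExampleBtsU32(uint32_t *puDst, uint8_t iBit)
{
    uint32_t const fMask = UINT32_C(1) << (iBit & 31);
    int const fCarry = (*puDst & fMask) != 0;   /* CF gets the old bit value */
    *puDst |= fMask;                            /* BTS sets it; BTR would clear, BTC would toggle */
    return fCarry;
}
#endif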
6306
6307/** Opcode 0x0f 0xbb. */
6308FNIEMOP_DEF(iemOp_btc_Ev_Gv)
6309{
6310 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
6311 IEMOP_HLP_MIN_386();
6312 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
6313}
6314
6315
6316/** Opcode 0x0f 0xbc. */
6317FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
6318{
6319 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
6320 IEMOP_HLP_MIN_386();
6321 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6322 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
6323}
6324
6325
6326/** Opcode 0x0f 0xbd. */
6327FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
6328{
6329 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
6330 IEMOP_HLP_MIN_386();
6331 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6332 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
6333}
6334
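/*
 * Note! BSF scans from bit 0 upwards and BSR from the top downwards; a zero
 * source sets ZF and leaves the destination architecturally undefined,
 * which is why both are routed through the generic rv,rm worker with most
 * flags marked undefined.  A minimal BSF sketch (illustrative helper):
 */
#if 0 /* illustrative only */
static int ExampleBsfU32(uint32_t uSrc, uint32_t *puDst)
{
    if (!uSrc)
        return 0;                   /* ZF=1, destination not written */
    uint32_t iBit = 0;
    while (!(uSrc & 1))             /* find the lowest set bit */
    {
        uSrc >>= 1;
        iBit++;
    }
    *puDst = iBit;
    return 1;                       /* ZF=0 */
}
#endif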
6335
6336/** Opcode 0x0f 0xbe. */
6337FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
6338{
6339 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
6340 IEMOP_HLP_MIN_386();
6341
6342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6343
6344 /*
6345 * If rm is denoting a register, no more instruction bytes.
6346 */
6347 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6348 {
6349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6350 switch (pVCpu->iem.s.enmEffOpSize)
6351 {
6352 case IEMMODE_16BIT:
6353 IEM_MC_BEGIN(0, 1);
6354 IEM_MC_LOCAL(uint16_t, u16Value);
6355 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6356 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6357 IEM_MC_ADVANCE_RIP();
6358 IEM_MC_END();
6359 return VINF_SUCCESS;
6360
6361 case IEMMODE_32BIT:
6362 IEM_MC_BEGIN(0, 1);
6363 IEM_MC_LOCAL(uint32_t, u32Value);
6364 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6365 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6366 IEM_MC_ADVANCE_RIP();
6367 IEM_MC_END();
6368 return VINF_SUCCESS;
6369
6370 case IEMMODE_64BIT:
6371 IEM_MC_BEGIN(0, 1);
6372 IEM_MC_LOCAL(uint64_t, u64Value);
6373 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6374 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6375 IEM_MC_ADVANCE_RIP();
6376 IEM_MC_END();
6377 return VINF_SUCCESS;
6378
6379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6380 }
6381 }
6382 else
6383 {
6384 /*
6385 * We're loading a register from memory.
6386 */
6387 switch (pVCpu->iem.s.enmEffOpSize)
6388 {
6389 case IEMMODE_16BIT:
6390 IEM_MC_BEGIN(0, 2);
6391 IEM_MC_LOCAL(uint16_t, u16Value);
6392 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6395 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6396 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
6397 IEM_MC_ADVANCE_RIP();
6398 IEM_MC_END();
6399 return VINF_SUCCESS;
6400
6401 case IEMMODE_32BIT:
6402 IEM_MC_BEGIN(0, 2);
6403 IEM_MC_LOCAL(uint32_t, u32Value);
6404 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6407 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6408 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6409 IEM_MC_ADVANCE_RIP();
6410 IEM_MC_END();
6411 return VINF_SUCCESS;
6412
6413 case IEMMODE_64BIT:
6414 IEM_MC_BEGIN(0, 2);
6415 IEM_MC_LOCAL(uint64_t, u64Value);
6416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6419 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6420 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6421 IEM_MC_ADVANCE_RIP();
6422 IEM_MC_END();
6423 return VINF_SUCCESS;
6424
6425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6426 }
6427 }
6428}
6429
6430
6431/** Opcode 0x0f 0xbf. */
6432FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
6433{
6434 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
6435 IEMOP_HLP_MIN_386();
6436
6437 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6438
6439 /** @todo Not entirely sure how the operand size prefix is handled here,
6440 * assuming that it will be ignored. Would be nice to have a few
6441 * tests for this. */
6442 /*
6443 * If rm is denoting a register, no more instruction bytes.
6444 */
6445 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6446 {
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6449 {
6450 IEM_MC_BEGIN(0, 1);
6451 IEM_MC_LOCAL(uint32_t, u32Value);
6452 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6453 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6454 IEM_MC_ADVANCE_RIP();
6455 IEM_MC_END();
6456 }
6457 else
6458 {
6459 IEM_MC_BEGIN(0, 1);
6460 IEM_MC_LOCAL(uint64_t, u64Value);
6461 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6462 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6463 IEM_MC_ADVANCE_RIP();
6464 IEM_MC_END();
6465 }
6466 }
6467 else
6468 {
6469 /*
6470 * We're loading a register from memory.
6471 */
6472 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
6473 {
6474 IEM_MC_BEGIN(0, 2);
6475 IEM_MC_LOCAL(uint32_t, u32Value);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6479 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6480 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
6481 IEM_MC_ADVANCE_RIP();
6482 IEM_MC_END();
6483 }
6484 else
6485 {
6486 IEM_MC_BEGIN(0, 2);
6487 IEM_MC_LOCAL(uint64_t, u64Value);
6488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6491 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
6492 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
6493 IEM_MC_ADVANCE_RIP();
6494 IEM_MC_END();
6495 }
6496 }
6497 return VINF_SUCCESS;
6498}
6499
6500
6501/** Opcode 0x0f 0xc0. */
6502FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6503{
6504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6505 IEMOP_HLP_MIN_486();
6506 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
6507
6508 /*
6509 * If rm is denoting a register, no more instruction bytes.
6510 */
6511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6512 {
6513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6514
6515 IEM_MC_BEGIN(3, 0);
6516 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6517 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6518 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6519
6520 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6521 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6522 IEM_MC_REF_EFLAGS(pEFlags);
6523 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6524
6525 IEM_MC_ADVANCE_RIP();
6526 IEM_MC_END();
6527 }
6528 else
6529 {
6530 /*
6531 * We're accessing memory.
6532 */
6533 IEM_MC_BEGIN(3, 3);
6534 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6535 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6536 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6537 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6539
6540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6541 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6542 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6543 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6544 IEM_MC_FETCH_EFLAGS(EFlags);
6545 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6546 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6547 else
6548 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6549
6550 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6551 IEM_MC_COMMIT_EFLAGS(EFlags);
6552 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy);
6553 IEM_MC_ADVANCE_RIP();
6554 IEM_MC_END();
6555 return VINF_SUCCESS;
6556 }
6557 return VINF_SUCCESS;
6558}
6559
6560
6561/** Opcode 0x0f 0xc1. */
6562FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
6563{
6564 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
6565 IEMOP_HLP_MIN_486();
6566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6567
6568 /*
6569 * If rm is denoting a register, no more instruction bytes.
6570 */
6571 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6572 {
6573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6574
6575 switch (pVCpu->iem.s.enmEffOpSize)
6576 {
6577 case IEMMODE_16BIT:
6578 IEM_MC_BEGIN(3, 0);
6579 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6580 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6581 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6582
6583 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6584 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6585 IEM_MC_REF_EFLAGS(pEFlags);
6586 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6587
6588 IEM_MC_ADVANCE_RIP();
6589 IEM_MC_END();
6590 return VINF_SUCCESS;
6591
6592 case IEMMODE_32BIT:
6593 IEM_MC_BEGIN(3, 0);
6594 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6595 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6596 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6597
6598 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6599 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6600 IEM_MC_REF_EFLAGS(pEFlags);
6601 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6602
6603 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6604 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
6605 IEM_MC_ADVANCE_RIP();
6606 IEM_MC_END();
6607 return VINF_SUCCESS;
6608
6609 case IEMMODE_64BIT:
6610 IEM_MC_BEGIN(3, 0);
6611 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6612 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6613 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6614
6615 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6616 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6617 IEM_MC_REF_EFLAGS(pEFlags);
6618 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6619
6620 IEM_MC_ADVANCE_RIP();
6621 IEM_MC_END();
6622 return VINF_SUCCESS;
6623
6624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6625 }
6626 }
6627 else
6628 {
6629 /*
6630 * We're accessing memory.
6631 */
6632 switch (pVCpu->iem.s.enmEffOpSize)
6633 {
6634 case IEMMODE_16BIT:
6635 IEM_MC_BEGIN(3, 3);
6636 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6637 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
6638 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6639 IEM_MC_LOCAL(uint16_t, u16RegCopy);
6640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6641
6642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6643 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6644 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6645 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
6646 IEM_MC_FETCH_EFLAGS(EFlags);
6647 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6648 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
6649 else
6650 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
6651
6652 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6653 IEM_MC_COMMIT_EFLAGS(EFlags);
6654 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy);
6655 IEM_MC_ADVANCE_RIP();
6656 IEM_MC_END();
6657 return VINF_SUCCESS;
6658
6659 case IEMMODE_32BIT:
6660 IEM_MC_BEGIN(3, 3);
6661 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6662 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
6663 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6664 IEM_MC_LOCAL(uint32_t, u32RegCopy);
6665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6666
6667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6668 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6669 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6670 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
6671 IEM_MC_FETCH_EFLAGS(EFlags);
6672 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6673 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
6674 else
6675 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
6676
6677 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6678 IEM_MC_COMMIT_EFLAGS(EFlags);
6679 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy);
6680 IEM_MC_ADVANCE_RIP();
6681 IEM_MC_END();
6682 return VINF_SUCCESS;
6683
6684 case IEMMODE_64BIT:
6685 IEM_MC_BEGIN(3, 3);
6686 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6687 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
6688 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6689 IEM_MC_LOCAL(uint64_t, u64RegCopy);
6690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6691
6692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6693 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6694 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6695 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
6696 IEM_MC_FETCH_EFLAGS(EFlags);
6697 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6698 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
6699 else
6700 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
6701
6702 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6703 IEM_MC_COMMIT_EFLAGS(EFlags);
6704 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy);
6705 IEM_MC_ADVANCE_RIP();
6706 IEM_MC_END();
6707 return VINF_SUCCESS;
6708
6709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6710 }
6711 }
6712}
6713
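/*
 * Note! XADD is an exchange-and-add: the destination receives the sum while
 * the source register receives the destination's old value, which is why
 * the memory forms above operate on a local copy of the register.  A
 * minimal sketch (illustrative helper, ignoring LOCK/atomicity):
 */
#if 0 /* illustrative only */
static void ExampleXaddU32(uint32_t *puDst, uint32_t *puReg)
{
    uint32_t const uOld = *puDst;   /* remember the destination's old value */
    *puDst = uOld + *puReg;         /* destination = destination + source */
    *puReg = uOld;                  /* source register = old destination */
}
#endif
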
6714/** Opcode 0x0f 0xc2. */
6715FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6716
6717
6718/** Opcode 0x0f 0xc3. */
6719FNIEMOP_DEF(iemOp_movnti_My_Gy)
6720{
6721 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
6722
6723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6724
6725 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
6726 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
6727 {
6728 switch (pVCpu->iem.s.enmEffOpSize)
6729 {
6730 case IEMMODE_32BIT:
6731 IEM_MC_BEGIN(0, 2);
6732 IEM_MC_LOCAL(uint32_t, u32Value);
6733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6734
6735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6737 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6738 return IEMOP_RAISE_INVALID_OPCODE();
6739
6740 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6741 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
6742 IEM_MC_ADVANCE_RIP();
6743 IEM_MC_END();
6744 break;
6745
6746 case IEMMODE_64BIT:
6747 IEM_MC_BEGIN(0, 2);
6748 IEM_MC_LOCAL(uint64_t, u64Value);
6749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6750
6751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6753 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2)
6754 return IEMOP_RAISE_INVALID_OPCODE();
6755
6756 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
6757 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
6758 IEM_MC_ADVANCE_RIP();
6759 IEM_MC_END();
6760 break;
6761
6762 case IEMMODE_16BIT:
6763 /** @todo check this form. */
6764 return IEMOP_RAISE_INVALID_OPCODE();
6765 }
6766 }
6767 else
6768 return IEMOP_RAISE_INVALID_OPCODE();
6769 return VINF_SUCCESS;
6770}
6771
6772
6773/** Opcode 0x0f 0xc4. */
6774FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
6775
6776/** Opcode 0x0f 0xc5. */
6777FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
6778
6779/** Opcode 0x0f 0xc6. */
6780FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6781
6782
6783/** Opcode 0x0f 0xc7 !11/1. */
6784FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
6785{
6786 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
6787
6788 IEM_MC_BEGIN(4, 3);
6789 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
6790 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
6791 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
6792 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6793 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
6794 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
6795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6796
6797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6798 IEMOP_HLP_DONE_DECODING();
6799 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6800
6801 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
6802 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
6803 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
6804
6805 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
6806 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
6807 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
6808
6809 IEM_MC_FETCH_EFLAGS(EFlags);
6810 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6811 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6812 else
6813 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
6814
6815 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
6816 IEM_MC_COMMIT_EFLAGS(EFlags);
6817 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6818 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
6819 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
6820 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
6821 IEM_MC_ENDIF();
6822 IEM_MC_ADVANCE_RIP();
6823
6824 IEM_MC_END();
6825 return VINF_SUCCESS;
6826}
6827
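/*
 * Note! CMPXCHG8B compares EDX:EAX against the 64-bit memory operand; on a
 * match it stores ECX:EBX and sets ZF, otherwise it loads the operand into
 * EDX:EAX and clears ZF.  A minimal sketch of the comparison logic
 * (illustrative helper, ignoring the LOCK/atomicity aspect handled above):
 */
#if 0 /* illustrative only */
static int ExampleCmpXchg8b(uint64_t *puMem, uint64_t *puEdxEax, uint64_t uEcxEbx)
{
    if (*puMem == *puEdxEax)
    {
        *puMem = uEcxEbx;       /* equal: store ECX:EBX, ZF=1 */
        return 1;
    }
    *puEdxEax = *puMem;         /* not equal: load the memory operand into EDX:EAX, ZF=0 */
    return 0;
}
#endif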
6828
6829/** Opcode REX.W 0x0f 0xc7 !11/1. */
6830FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
6831{
6832 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
6833 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6834 {
6835#if 0
6836 RT_NOREF(bRm);
6837 IEMOP_BITCH_ABOUT_STUB();
6838 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6839#else
6840 IEM_MC_BEGIN(4, 3);
6841 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
6842 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
6843 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
6844 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
6845 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
6846 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
6847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6848
6849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6850 IEMOP_HLP_DONE_DECODING();
6851 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
6852 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6853
6854 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
6855 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
6856 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
6857
6858 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
6859 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
6860 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
6861
6862 IEM_MC_FETCH_EFLAGS(EFlags);
6863# ifdef RT_ARCH_AMD64
6864 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b)
6865 {
6866 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
6867 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6868 else
6869 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6870 }
6871 else
6872# endif
6873 {
6874 /* Note! The fallback for 32-bit systems and systems without CX16 uses
6875 multiple accesses and is not at all atomic, which works fine in a
6876 uni-CPU guest configuration (ignoring DMA). If guest SMP is active
6877 we have no choice but to use a rendezvous callback here. Sigh. */
6878 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
6879 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6880 else
6881 {
6882 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
6883 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
6884 }
6885 }
6886
6887 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
6888 IEM_MC_COMMIT_EFLAGS(EFlags);
6889 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
6890 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
6891 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
6892 IEM_MC_ENDIF();
6893 IEM_MC_ADVANCE_RIP();
6894
6895 IEM_MC_END();
6896 return VINF_SUCCESS;
6897#endif
6898 }
6899 Log(("cmpxchg16b -> #UD\n"));
6900 return IEMOP_RAISE_INVALID_OPCODE();
6901}
6902
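/*
 * Note! Unlike CMPXCHG8B, the 16-byte variant raises #GP(0) on a misaligned
 * operand, hence the explicit 16-byte alignment check above.  A minimal
 * sketch of such a check (illustrative):
 */
#if 0 /* illustrative only */
static int ExampleIsAligned16(uint64_t GCPtr)
{
    return (GCPtr & 15) == 0;   /* the low four address bits must be clear */
}
#endif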
6903
6904/** Opcode 0x0f 0xc7 11/6. */
6905FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6906
6907/** Opcode 0x0f 0xc7 !11/6. */
6908FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6909
6910/** Opcode 0x66 0x0f 0xc7 !11/6. */
6911FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6912
6913/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6914FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6915
6916/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6917FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6918
6919
6920/** Opcode 0x0f 0xc7. */
6921FNIEMOP_DEF(iemOp_Grp9)
6922{
6923 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6926 {
6927 case 0: case 2: case 3: case 4: case 5:
6928 return IEMOP_RAISE_INVALID_OPCODE();
6929 case 1:
6930 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6931 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6932 || (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6933 return IEMOP_RAISE_INVALID_OPCODE();
6934 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6935 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6936 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6937 case 6:
6938 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6939 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6940 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6941 {
6942 case 0:
6943 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6944 case IEM_OP_PRF_SIZE_OP:
6945 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6946 case IEM_OP_PRF_REPZ:
6947 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6948 default:
6949 return IEMOP_RAISE_INVALID_OPCODE();
6950 }
6951 case 7:
6952 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6953 {
6954 case 0:
6955 case IEM_OP_PRF_REPZ:
6956 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6957 default:
6958 return IEMOP_RAISE_INVALID_OPCODE();
6959 }
6960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6961 }
6962}
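
/* Editor's note: a tiny illustration of the ModR/M dissection the Grp9 decoder
   above relies on; the helper name is invented for illustration only. */
#if 0
static void iemSketchDissectModRm(uint8_t bRm)
{
    /* mod == 11b selects the register form; anything else is a memory form. */
    bool const     fRegForm = (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT);
    /* reg selects the /0../7 sub-opcode within the group (e.g. /1 = cmpxchg8b/16b). */
    unsigned const iRegOp   = (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK;
    /* rm names the register operand or the base of the effective address. */
    unsigned const iRm      = bRm & X86_MODRM_RM_MASK;
    RT_NOREF(fRegForm, iRegOp, iRm);
}
#endif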
6963
6964
6965/**
6966 * Common 'bswap register' helper.
6967 */
6968FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6969{
6970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6971 switch (pVCpu->iem.s.enmEffOpSize)
6972 {
6973 case IEMMODE_16BIT:
6974 IEM_MC_BEGIN(1, 0);
6975 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6976 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6977 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6978 IEM_MC_ADVANCE_RIP();
6979 IEM_MC_END();
6980 return VINF_SUCCESS;
6981
6982 case IEMMODE_32BIT:
6983 IEM_MC_BEGIN(1, 0);
6984 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6985 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6986 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6987 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6988 IEM_MC_ADVANCE_RIP();
6989 IEM_MC_END();
6990 return VINF_SUCCESS;
6991
6992 case IEMMODE_64BIT:
6993 IEM_MC_BEGIN(1, 0);
6994 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6995 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6996 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6997 IEM_MC_ADVANCE_RIP();
6998 IEM_MC_END();
6999 return VINF_SUCCESS;
7000
7001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7002 }
7003}
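
/* Editor's note: reference semantics for the 32-bit bswap worker invoked
   above, as a hedged C sketch under a made-up name (the real iemAImpl worker
   lives elsewhere). The 16-bit form is not sketched since the architecture
   leaves its result undefined, which is also why the 16-bit case above takes
   a 32-bit register reference without clearing the high dword. */
#if 0
static void iemSketchBSwapU32(uint32_t *pu32Dst)
{
    uint32_t const u = *pu32Dst;
    *pu32Dst = (u << 24)
             | ((u & UINT32_C(0x0000ff00)) << 8)
             | ((u >> 8) & UINT32_C(0x0000ff00))
             | (u >> 24);
}
#endif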
7004
7005
7006/** Opcode 0x0f 0xc8. */
7007FNIEMOP_DEF(iemOp_bswap_rAX_r8)
7008{
7009 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
7010 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
7011 prefix. It appears REX.B is the correct prefix. For a parallel
7012 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
7013 IEMOP_HLP_MIN_486();
7014 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7015}
7016
7017
7018/** Opcode 0x0f 0xc9. */
7019FNIEMOP_DEF(iemOp_bswap_rCX_r9)
7020{
7021 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
7022 IEMOP_HLP_MIN_486();
7023 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7024}
7025
7026
7027/** Opcode 0x0f 0xca. */
7028FNIEMOP_DEF(iemOp_bswap_rDX_r10)
7029{
7030 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
7031 IEMOP_HLP_MIN_486();
7032 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7033}
7034
7035
7036/** Opcode 0x0f 0xcb. */
7037FNIEMOP_DEF(iemOp_bswap_rBX_r11)
7038{
7039 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
7040 IEMOP_HLP_MIN_486();
7041 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7042}
7043
7044
7045/** Opcode 0x0f 0xcc. */
7046FNIEMOP_DEF(iemOp_bswap_rSP_r12)
7047{
7048 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
7049 IEMOP_HLP_MIN_486();
7050 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7051}
7052
7053
7054/** Opcode 0x0f 0xcd. */
7055FNIEMOP_DEF(iemOp_bswap_rBP_r13)
7056{
7057 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
7058 IEMOP_HLP_MIN_486();
7059 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7060}
7061
7062
7063/** Opcode 0x0f 0xce. */
7064FNIEMOP_DEF(iemOp_bswap_rSI_r14)
7065{
7066 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
7067 IEMOP_HLP_MIN_486();
7068 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7069}
7070
7071
7072/** Opcode 0x0f 0xcf. */
7073FNIEMOP_DEF(iemOp_bswap_rDI_r15)
7074{
7075 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
7076 IEMOP_HLP_MIN_486();
7077 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7078}
7079
7080
7081
7082/** Opcode 0x0f 0xd0. */
7083FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
7084/** Opcode 0x0f 0xd1. */
7085FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
7086/** Opcode 0x0f 0xd2. */
7087FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
7088/** Opcode 0x0f 0xd3. */
7089FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
7090/** Opcode 0x0f 0xd4. */
7091FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
7092/** Opcode 0x0f 0xd5. */
7093FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
7094/** Opcode 0x0f 0xd6. */
7095FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq); /** @todo Win10 w/o np may need this: 66 0f d6 0a */
7096
7097
7098/** Opcode 0x0f 0xd7. */
7099FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
7100{
7101 /* Docs say register only. */
7102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7103 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
7104 return IEMOP_RAISE_INVALID_OPCODE();
7105
7106 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
7107 /** @todo testcase: Check that the instruction implicitly clears the high
7108 * bits in 64-bit mode. REX.W only becomes necessary when VLMAX > 256
7109 * and opcode modifications are made to work with the whole width (not
7110 * just 128). */
7111 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7112 {
7113 case IEM_OP_PRF_SIZE_OP: /* SSE */
7114 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq");
7115 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
7116 IEM_MC_BEGIN(2, 0);
7117 IEM_MC_ARG(uint64_t *, pDst, 0);
7118 IEM_MC_ARG(uint128_t const *, pSrc, 1);
7119 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7120 IEM_MC_PREPARE_SSE_USAGE();
7121 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7122 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
7123 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
7124 IEM_MC_ADVANCE_RIP();
7125 IEM_MC_END();
7126 return VINF_SUCCESS;
7127
7128 case 0: /* MMX */
7129 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq");
7130 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
7131 IEM_MC_BEGIN(2, 0);
7132 IEM_MC_ARG(uint64_t *, pDst, 0);
7133 IEM_MC_ARG(uint64_t const *, pSrc, 1);
7134 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
7135 IEM_MC_PREPARE_FPU_USAGE();
7136 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7137 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
7138 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
7139 IEM_MC_ADVANCE_RIP();
7140 IEM_MC_END();
7141 return VINF_SUCCESS;
7142
7143 default:
7144 return IEMOP_RAISE_INVALID_OPCODE();
7145 }
7146}
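
/* Editor's note: what the 64-bit pmovmskb worker called above computes, as a
   plain C sketch under an invented name: bit i of the result is the sign bit
   of source byte i, and the rest of the destination is zeroed. */
#if 0
static void iemSketchPMovMskBU64(uint64_t *pu64Dst, uint64_t const *pu64Src)
{
    uint64_t const uSrc  = *pu64Src;
    uint64_t       fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte; /* byte sign bit -> bit iByte */
    *pu64Dst = fMask;
}
#endif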
7147
7148
7149/** Opcode 0x0f 0xd8. */
7150FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
7151/** Opcode 0x0f 0xd9. */
7152FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
7153/** Opcode 0x0f 0xda. */
7154FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
7155/** Opcode 0x0f 0xdb. */
7156FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
7157/** Opcode 0x0f 0xdc. */
7158FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
7159/** Opcode 0x0f 0xdd. */
7160FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
7161/** Opcode 0x0f 0xde. */
7162FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
7163/** Opcode 0x0f 0xdf. */
7164FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
7165/** Opcode 0x0f 0xe0. */
7166FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
7167/** Opcode 0x0f 0xe1. */
7168FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
7169/** Opcode 0x0f 0xe2. */
7170FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
7171/** Opcode 0x0f 0xe3. */
7172FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
7173/** Opcode 0x0f 0xe4. */
7174FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
7175/** Opcode 0x0f 0xe5. */
7176FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
7177/** Opcode 0x0f 0xe6. */
7178FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
7179
7180
7181/** Opcode 0x0f 0xe7. */
7182FNIEMOP_DEF(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq)
7183{
7184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7185 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
7186 {
7187 /*
7188 * Register, memory.
7189 */
7190/** @todo check when the REPNZ/Z bits kick in. Same as lock, probably... */
7191 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7192 {
7193
7194 case IEM_OP_PRF_SIZE_OP: /* SSE */
7195 IEMOP_MNEMONIC(movntdq_Mdq_Vdq, "movntdq Mdq,Vdq");
7196 IEM_MC_BEGIN(0, 2);
7197 IEM_MC_LOCAL(uint128_t, uSrc);
7198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7199
7200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7202 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
7203 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7204
7205 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
7206 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7207
7208 IEM_MC_ADVANCE_RIP();
7209 IEM_MC_END();
7210 break;
7211
7212 case 0: /* MMX */
7213 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq");
7214 IEM_MC_BEGIN(0, 2);
7215 IEM_MC_LOCAL(uint64_t, uSrc);
7216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7217
7218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7220 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7221 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7222
7223 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
7224 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
7225
7226 IEM_MC_ADVANCE_RIP();
7227 IEM_MC_END();
7228 break;
7229
7230 default:
7231 return IEMOP_RAISE_INVALID_OPCODE();
7232 }
7233 }
7234 /* The register, register encoding is invalid. */
7235 else
7236 return IEMOP_RAISE_INVALID_OPCODE();
7237 return VINF_SUCCESS;
7238}
7239
7240
7241/** Opcode 0x0f 0xe8. */
7242FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
7243/** Opcode 0x0f 0xe9. */
7244FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
7245/** Opcode 0x0f 0xea. */
7246FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
7247/** Opcode 0x0f 0xeb. */
7248FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
7249/** Opcode 0x0f 0xec. */
7250FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
7251/** Opcode 0x0f 0xed. */
7252FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
7253/** Opcode 0x0f 0xee. */
7254FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
7255
7256
7257/** Opcode 0x0f 0xef. */
7258FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
7259{
7260 IEMOP_MNEMONIC(pxor, "pxor");
7261 return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
7262}
7263
7264
7265/** Opcode 0x0f 0xf0. */
7266FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
7267/** Opcode 0x0f 0xf1. */
7268FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
7269/** Opcode 0x0f 0xf2. */
7270FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
7271/** Opcode 0x0f 0xf3. */
7272FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
7273/** Opcode 0x0f 0xf4. */
7274FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
7275/** Opcode 0x0f 0xf5. */
7276FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
7277/** Opcode 0x0f 0xf6. */
7278FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
7279/** Opcode 0x0f 0xf7. */
7280FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
7281/** Opcode 0x0f 0xf8. */
7282FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
7283/** Opcode 0x0f 0xf9. */
7284FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
7285/** Opcode 0x0f 0xfa. */
7286FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
7287/** Opcode 0x0f 0xfb. */
7288FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
7289/** Opcode 0x0f 0xfc. */
7290FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
7291/** Opcode 0x0f 0xfd. */
7292FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
7293/** Opcode 0x0f 0xfe. */
7294FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
7295
7296
7297IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[256] =
7298{
7299 /* 0x00 */ iemOp_Grp6,
7300 /* 0x01 */ iemOp_Grp7,
7301 /* 0x02 */ iemOp_lar_Gv_Ew,
7302 /* 0x03 */ iemOp_lsl_Gv_Ew,
7303 /* 0x04 */ iemOp_Invalid,
7304 /* 0x05 */ iemOp_syscall,
7305 /* 0x06 */ iemOp_clts,
7306 /* 0x07 */ iemOp_sysret,
7307 /* 0x08 */ iemOp_invd,
7308 /* 0x09 */ iemOp_wbinvd,
7309 /* 0x0a */ iemOp_Invalid,
7310 /* 0x0b */ iemOp_ud2,
7311 /* 0x0c */ iemOp_Invalid,
7312 /* 0x0d */ iemOp_nop_Ev_GrpP,
7313 /* 0x0e */ iemOp_femms,
7314 /* 0x0f */ iemOp_3Dnow,
7315 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
7316 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
7317 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
7318 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
7319 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
7320 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
7321 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
7322 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
7323 /* 0x18 */ iemOp_prefetch_Grp16,
7324 /* 0x19 */ iemOp_nop_Ev,
7325 /* 0x1a */ iemOp_nop_Ev,
7326 /* 0x1b */ iemOp_nop_Ev,
7327 /* 0x1c */ iemOp_nop_Ev,
7328 /* 0x1d */ iemOp_nop_Ev,
7329 /* 0x1e */ iemOp_nop_Ev,
7330 /* 0x1f */ iemOp_nop_Ev,
7331 /* 0x20 */ iemOp_mov_Rd_Cd,
7332 /* 0x21 */ iemOp_mov_Rd_Dd,
7333 /* 0x22 */ iemOp_mov_Cd_Rd,
7334 /* 0x23 */ iemOp_mov_Dd_Rd,
7335 /* 0x24 */ iemOp_mov_Rd_Td,
7336 /* 0x25 */ iemOp_Invalid,
7337 /* 0x26 */ iemOp_mov_Td_Rd,
7338 /* 0x27 */ iemOp_Invalid,
7339 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
7340 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
7341 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
7342 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
7343 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
7344 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
7345 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
7346 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
7347 /* 0x30 */ iemOp_wrmsr,
7348 /* 0x31 */ iemOp_rdtsc,
7349 /* 0x32 */ iemOp_rdmsr,
7350 /* 0x33 */ iemOp_rdpmc,
7351 /* 0x34 */ iemOp_sysenter,
7352 /* 0x35 */ iemOp_sysexit,
7353 /* 0x36 */ iemOp_Invalid,
7354 /* 0x37 */ iemOp_getsec,
7355 /* 0x38 */ iemOp_3byte_Esc_A4,
7356 /* 0x39 */ iemOp_Invalid,
7357 /* 0x3a */ iemOp_3byte_Esc_A5,
7358 /* 0x3b */ iemOp_Invalid,
7359 /* 0x3c */ iemOp_Invalid,
7360 /* 0x3d */ iemOp_Invalid,
7361 /* 0x3e */ iemOp_Invalid,
7362 /* 0x3f */ iemOp_Invalid,
7363 /* 0x40 */ iemOp_cmovo_Gv_Ev,
7364 /* 0x41 */ iemOp_cmovno_Gv_Ev,
7365 /* 0x42 */ iemOp_cmovc_Gv_Ev,
7366 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
7367 /* 0x44 */ iemOp_cmove_Gv_Ev,
7368 /* 0x45 */ iemOp_cmovne_Gv_Ev,
7369 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
7370 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
7371 /* 0x48 */ iemOp_cmovs_Gv_Ev,
7372 /* 0x49 */ iemOp_cmovns_Gv_Ev,
7373 /* 0x4a */ iemOp_cmovp_Gv_Ev,
7374 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
7375 /* 0x4c */ iemOp_cmovl_Gv_Ev,
7376 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
7377 /* 0x4e */ iemOp_cmovle_Gv_Ev,
7378 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
7379 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
7380 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
7381 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
7382 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
7383 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
7384 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
7385 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
7386 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
7387 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
7388 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
7389 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
7390 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
7391 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
7392 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
7393 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
7394 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
7395 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
7396 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
7397 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
7398 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
7399 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
7400 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
7401 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
7402 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
7403 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
7404 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
7405 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
7406 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
7407 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
7408 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
7409 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
7410 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
7411 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
7412 /* 0x71 */ iemOp_Grp12,
7413 /* 0x72 */ iemOp_Grp13,
7414 /* 0x73 */ iemOp_Grp14,
7415 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
7416 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
7417 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
7418 /* 0x77 */ iemOp_emms,
7419 /* 0x78 */ iemOp_vmread_AmdGrp17,
7420 /* 0x79 */ iemOp_vmwrite,
7421 /* 0x7a */ iemOp_Invalid,
7422 /* 0x7b */ iemOp_Invalid,
7423 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
7424 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
7425 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
7426 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
7427 /* 0x80 */ iemOp_jo_Jv,
7428 /* 0x81 */ iemOp_jno_Jv,
7429 /* 0x82 */ iemOp_jc_Jv,
7430 /* 0x83 */ iemOp_jnc_Jv,
7431 /* 0x84 */ iemOp_je_Jv,
7432 /* 0x85 */ iemOp_jne_Jv,
7433 /* 0x86 */ iemOp_jbe_Jv,
7434 /* 0x87 */ iemOp_jnbe_Jv,
7435 /* 0x88 */ iemOp_js_Jv,
7436 /* 0x89 */ iemOp_jns_Jv,
7437 /* 0x8a */ iemOp_jp_Jv,
7438 /* 0x8b */ iemOp_jnp_Jv,
7439 /* 0x8c */ iemOp_jl_Jv,
7440 /* 0x8d */ iemOp_jnl_Jv,
7441 /* 0x8e */ iemOp_jle_Jv,
7442 /* 0x8f */ iemOp_jnle_Jv,
7443 /* 0x90 */ iemOp_seto_Eb,
7444 /* 0x91 */ iemOp_setno_Eb,
7445 /* 0x92 */ iemOp_setc_Eb,
7446 /* 0x93 */ iemOp_setnc_Eb,
7447 /* 0x94 */ iemOp_sete_Eb,
7448 /* 0x95 */ iemOp_setne_Eb,
7449 /* 0x96 */ iemOp_setbe_Eb,
7450 /* 0x97 */ iemOp_setnbe_Eb,
7451 /* 0x98 */ iemOp_sets_Eb,
7452 /* 0x99 */ iemOp_setns_Eb,
7453 /* 0x9a */ iemOp_setp_Eb,
7454 /* 0x9b */ iemOp_setnp_Eb,
7455 /* 0x9c */ iemOp_setl_Eb,
7456 /* 0x9d */ iemOp_setnl_Eb,
7457 /* 0x9e */ iemOp_setle_Eb,
7458 /* 0x9f */ iemOp_setnle_Eb,
7459 /* 0xa0 */ iemOp_push_fs,
7460 /* 0xa1 */ iemOp_pop_fs,
7461 /* 0xa2 */ iemOp_cpuid,
7462 /* 0xa3 */ iemOp_bt_Ev_Gv,
7463 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
7464 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
7465 /* 0xa6 */ iemOp_Invalid,
7466 /* 0xa7 */ iemOp_Invalid,
7467 /* 0xa8 */ iemOp_push_gs,
7468 /* 0xa9 */ iemOp_pop_gs,
7469 /* 0xaa */ iemOp_rsm,
7470 /* 0xab */ iemOp_bts_Ev_Gv,
7471 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
7472 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
7473 /* 0xae */ iemOp_Grp15,
7474 /* 0xaf */ iemOp_imul_Gv_Ev,
7475 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
7476 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
7477 /* 0xb2 */ iemOp_lss_Gv_Mp,
7478 /* 0xb3 */ iemOp_btr_Ev_Gv,
7479 /* 0xb4 */ iemOp_lfs_Gv_Mp,
7480 /* 0xb5 */ iemOp_lgs_Gv_Mp,
7481 /* 0xb6 */ iemOp_movzx_Gv_Eb,
7482 /* 0xb7 */ iemOp_movzx_Gv_Ew,
7483 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
7484 /* 0xb9 */ iemOp_Grp10,
7485 /* 0xba */ iemOp_Grp8,
7486 /* 0xbb */ iemOp_btc_Ev_Gv,
7487 /* 0xbc */ iemOp_bsf_Gv_Ev,
7488 /* 0xbd */ iemOp_bsr_Gv_Ev,
7489 /* 0xbe */ iemOp_movsx_Gv_Eb,
7490 /* 0xbf */ iemOp_movsx_Gv_Ew,
7491 /* 0xc0 */ iemOp_xadd_Eb_Gb,
7492 /* 0xc1 */ iemOp_xadd_Ev_Gv,
7493 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
7494 /* 0xc3 */ iemOp_movnti_My_Gy,
7495 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
7496 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
7497 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
7498 /* 0xc7 */ iemOp_Grp9,
7499 /* 0xc8 */ iemOp_bswap_rAX_r8,
7500 /* 0xc9 */ iemOp_bswap_rCX_r9,
7501 /* 0xca */ iemOp_bswap_rDX_r10,
7502 /* 0xcb */ iemOp_bswap_rBX_r11,
7503 /* 0xcc */ iemOp_bswap_rSP_r12,
7504 /* 0xcd */ iemOp_bswap_rBP_r13,
7505 /* 0xce */ iemOp_bswap_rSI_r14,
7506 /* 0xcf */ iemOp_bswap_rDI_r15,
7507 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
7508 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
7509 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
7510 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
7511 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
7512 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
7513 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
7514 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
7515 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
7516 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
7517 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
7518 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
7519 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
7520 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
7521 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
7522 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
7523 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
7524 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
7525 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
7526 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
7527 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
7528 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
7529 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
7530 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
7531 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
7532 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
7533 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
7534 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
7535 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
7536 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
7537 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
7538 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
7539 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
7540 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
7541 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
7542 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
7543 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
7544 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
7545 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
7546 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
7547 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
7548 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
7549 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
7550 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
7551 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
7552 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
7553 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
7554 /* 0xff */ iemOp_Invalid
7555};
7556
7557/** @} */
7558
7559
7560/** @name One byte opcodes.
7561 *
7562 * @{
7563 */
7564
7565/** Opcode 0x00. */
7566FNIEMOP_DEF(iemOp_add_Eb_Gb)
7567{
7568 IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
7569 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
7570}
7571
7572
7573/** Opcode 0x01. */
7574FNIEMOP_DEF(iemOp_add_Ev_Gv)
7575{
7576 IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
7577 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
7578}
7579
7580
7581/** Opcode 0x02. */
7582FNIEMOP_DEF(iemOp_add_Gb_Eb)
7583{
7584 IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
7585 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
7586}
7587
7588
7589/** Opcode 0x03. */
7590FNIEMOP_DEF(iemOp_add_Gv_Ev)
7591{
7592 IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
7593 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
7594}
7595
7596
7597/** Opcode 0x04. */
7598FNIEMOP_DEF(iemOp_add_Al_Ib)
7599{
7600 IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
7601 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
7602}
7603
7604
7605/** Opcode 0x05. */
7606FNIEMOP_DEF(iemOp_add_eAX_Iz)
7607{
7608 IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
7609 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
7610}
7611
7612
7613/** Opcode 0x06. */
7614FNIEMOP_DEF(iemOp_push_ES)
7615{
7616 IEMOP_MNEMONIC(push_es, "push es");
7617 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
7618}
7619
7620
7621/** Opcode 0x07. */
7622FNIEMOP_DEF(iemOp_pop_ES)
7623{
7624 IEMOP_MNEMONIC(pop_es, "pop es");
7625 IEMOP_HLP_NO_64BIT();
7626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7627 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
7628}
7629
7630
7631/** Opcode 0x08. */
7632FNIEMOP_DEF(iemOp_or_Eb_Gb)
7633{
7634 IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
7635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7636 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
7637}
7638
7639
7640/** Opcode 0x09. */
7641FNIEMOP_DEF(iemOp_or_Ev_Gv)
7642{
7643 IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
7644 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7645 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
7646}
7647
7648
7649/** Opcode 0x0a. */
7650FNIEMOP_DEF(iemOp_or_Gb_Eb)
7651{
7652 IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
7653 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7654 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
7655}
7656
7657
7658/** Opcode 0x0b. */
7659FNIEMOP_DEF(iemOp_or_Gv_Ev)
7660{
7661 IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
7662 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7663 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
7664}
7665
7666
7667/** Opcode 0x0c. */
7668FNIEMOP_DEF(iemOp_or_Al_Ib)
7669{
7670 IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
7671 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7672 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
7673}
7674
7675
7676/** Opcode 0x0d. */
7677FNIEMOP_DEF(iemOp_or_eAX_Iz)
7678{
7679 IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
7680 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7681 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
7682}
7683
7684
7685/** Opcode 0x0e. */
7686FNIEMOP_DEF(iemOp_push_CS)
7687{
7688 IEMOP_MNEMONIC(push_cs, "push cs");
7689 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
7690}
7691
7692
7693/** Opcode 0x0f. */
7694FNIEMOP_DEF(iemOp_2byteEscape)
7695{
7696 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7697 /** @todo PUSH CS on 8086, undefined on 80186. */
7698 IEMOP_HLP_MIN_286();
7699 return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
7700}
7701
7702/** Opcode 0x10. */
7703FNIEMOP_DEF(iemOp_adc_Eb_Gb)
7704{
7705 IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
7706 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
7707}
7708
7709
7710/** Opcode 0x11. */
7711FNIEMOP_DEF(iemOp_adc_Ev_Gv)
7712{
7713 IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
7714 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
7715}
7716
7717
7718/** Opcode 0x12. */
7719FNIEMOP_DEF(iemOp_adc_Gb_Eb)
7720{
7721 IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
7722 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
7723}
7724
7725
7726/** Opcode 0x13. */
7727FNIEMOP_DEF(iemOp_adc_Gv_Ev)
7728{
7729 IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
7730 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
7731}
7732
7733
7734/** Opcode 0x14. */
7735FNIEMOP_DEF(iemOp_adc_Al_Ib)
7736{
7737 IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
7738 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
7739}
7740
7741
7742/** Opcode 0x15. */
7743FNIEMOP_DEF(iemOp_adc_eAX_Iz)
7744{
7745 IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
7746 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
7747}
7748
7749
7750/** Opcode 0x16. */
7751FNIEMOP_DEF(iemOp_push_SS)
7752{
7753 IEMOP_MNEMONIC(push_ss, "push ss");
7754 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
7755}
7756
7757
7758/** Opcode 0x17. */
7759FNIEMOP_DEF(iemOp_pop_SS)
7760{
7761 IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
7762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7763 IEMOP_HLP_NO_64BIT();
7764 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
7765}
7766
7767
7768/** Opcode 0x18. */
7769FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
7770{
7771 IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
7772 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
7773}
7774
7775
7776/** Opcode 0x19. */
7777FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
7778{
7779 IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
7780 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
7781}
7782
7783
7784/** Opcode 0x1a. */
7785FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
7786{
7787 IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
7788 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
7789}
7790
7791
7792/** Opcode 0x1b. */
7793FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
7794{
7795 IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
7796 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
7797}
7798
7799
7800/** Opcode 0x1c. */
7801FNIEMOP_DEF(iemOp_sbb_Al_Ib)
7802{
7803 IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
7804 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
7805}
7806
7807
7808/** Opcode 0x1d. */
7809FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
7810{
7811 IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
7812 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
7813}
7814
7815
7816/** Opcode 0x1e. */
7817FNIEMOP_DEF(iemOp_push_DS)
7818{
7819 IEMOP_MNEMONIC(push_ds, "push ds");
7820 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
7821}
7822
7823
7824/** Opcode 0x1f. */
7825FNIEMOP_DEF(iemOp_pop_DS)
7826{
7827 IEMOP_MNEMONIC(pop_ds, "pop ds");
7828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7829 IEMOP_HLP_NO_64BIT();
7830 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
7831}
7832
7833
7834/** Opcode 0x20. */
7835FNIEMOP_DEF(iemOp_and_Eb_Gb)
7836{
7837 IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
7838 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7839 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
7840}
7841
7842
7843/** Opcode 0x21. */
7844FNIEMOP_DEF(iemOp_and_Ev_Gv)
7845{
7846 IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
7847 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7848 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
7849}
7850
7851
7852/** Opcode 0x22. */
7853FNIEMOP_DEF(iemOp_and_Gb_Eb)
7854{
7855 IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
7856 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7857 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
7858}
7859
7860
7861/** Opcode 0x23. */
7862FNIEMOP_DEF(iemOp_and_Gv_Ev)
7863{
7864 IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
7865 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7866 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
7867}
7868
7869
7870/** Opcode 0x24. */
7871FNIEMOP_DEF(iemOp_and_Al_Ib)
7872{
7873 IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
7874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
7876}
7877
7878
7879/** Opcode 0x25. */
7880FNIEMOP_DEF(iemOp_and_eAX_Iz)
7881{
7882 IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
7883 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7884 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
7885}
7886
7887
7888/** Opcode 0x26. */
7889FNIEMOP_DEF(iemOp_seg_ES)
7890{
7891 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
7892 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
7893 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
7894
7895 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7896 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7897}
7898
7899
7900/** Opcode 0x27. */
7901FNIEMOP_DEF(iemOp_daa)
7902{
7903 IEMOP_MNEMONIC(daa_AL, "daa AL");
7904 IEMOP_HLP_NO_64BIT();
7905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7906 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7907 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
7908}
7909
7910
7911/** Opcode 0x28. */
7912FNIEMOP_DEF(iemOp_sub_Eb_Gb)
7913{
7914 IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
7915 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
7916}
7917
7918
7919/** Opcode 0x29. */
7920FNIEMOP_DEF(iemOp_sub_Ev_Gv)
7921{
7922 IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
7923 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
7924}
7925
7926
7927/** Opcode 0x2a. */
7928FNIEMOP_DEF(iemOp_sub_Gb_Eb)
7929{
7930 IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
7931 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
7932}
7933
7934
7935/** Opcode 0x2b. */
7936FNIEMOP_DEF(iemOp_sub_Gv_Ev)
7937{
7938 IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
7939 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
7940}
7941
7942
7943/** Opcode 0x2c. */
7944FNIEMOP_DEF(iemOp_sub_Al_Ib)
7945{
7946 IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
7947 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
7948}
7949
7950
7951/** Opcode 0x2d. */
7952FNIEMOP_DEF(iemOp_sub_eAX_Iz)
7953{
7954 IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
7955 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
7956}
7957
7958
7959/** Opcode 0x2e. */
7960FNIEMOP_DEF(iemOp_seg_CS)
7961{
7962 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
7963 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
7964 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
7965
7966 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
7967 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
7968}
7969
7970
7971/** Opcode 0x2f. */
7972FNIEMOP_DEF(iemOp_das)
7973{
7974 IEMOP_MNEMONIC(das_AL, "das AL");
7975 IEMOP_HLP_NO_64BIT();
7976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7977 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
7978 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
7979}
7980
7981
7982/** Opcode 0x30. */
7983FNIEMOP_DEF(iemOp_xor_Eb_Gb)
7984{
7985 IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
7986 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7987 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
7988}
7989
7990
7991/** Opcode 0x31. */
7992FNIEMOP_DEF(iemOp_xor_Ev_Gv)
7993{
7994 IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
7995 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7996 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
7997}
7998
7999
8000/** Opcode 0x32. */
8001FNIEMOP_DEF(iemOp_xor_Gb_Eb)
8002{
8003 IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
8004 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8005 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
8006}
8007
8008
8009/** Opcode 0x33. */
8010FNIEMOP_DEF(iemOp_xor_Gv_Ev)
8011{
8012 IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
8013 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8014 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
8015}
8016
8017
8018/** Opcode 0x34. */
8019FNIEMOP_DEF(iemOp_xor_Al_Ib)
8020{
8021 IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
8022 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8023 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
8024}
8025
8026
8027/** Opcode 0x35. */
8028FNIEMOP_DEF(iemOp_xor_eAX_Iz)
8029{
8030 IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
8031 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
8032 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
8033}
8034
8035
8036/** Opcode 0x36. */
8037FNIEMOP_DEF(iemOp_seg_SS)
8038{
8039 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
8040 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
8041 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
8042
8043 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8044 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8045}
8046
8047
8048/** Opcode 0x37. */
8049FNIEMOP_STUB(iemOp_aaa);
8050
8051
8052/** Opcode 0x38. */
8053FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
8054{
8055 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
8056 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
8057}
8058
8059
8060/** Opcode 0x39. */
8061FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
8062{
8063 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
8064 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
8065}
8066
8067
8068/** Opcode 0x3a. */
8069FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
8070{
8071 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
8072 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
8073}
8074
8075
8076/** Opcode 0x3b. */
8077FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
8078{
8079 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
8080 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
8081}
8082
8083
8084/** Opcode 0x3c. */
8085FNIEMOP_DEF(iemOp_cmp_Al_Ib)
8086{
8087 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
8088 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
8089}
8090
8091
8092/** Opcode 0x3d. */
8093FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
8094{
8095 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
8096 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
8097}
8098
8099
8100/** Opcode 0x3e. */
8101FNIEMOP_DEF(iemOp_seg_DS)
8102{
8103 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
8104 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
8105 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
8106
8107 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8108 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8109}
8110
8111
8112/** Opcode 0x3f. */
8113FNIEMOP_STUB(iemOp_aas);
8114
8115/**
8116 * Common 'inc/dec/not/neg register' helper.
8117 */
8118FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
8119{
8120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8121 switch (pVCpu->iem.s.enmEffOpSize)
8122 {
8123 case IEMMODE_16BIT:
8124 IEM_MC_BEGIN(2, 0);
8125 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8126 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8127 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8128 IEM_MC_REF_EFLAGS(pEFlags);
8129 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
8130 IEM_MC_ADVANCE_RIP();
8131 IEM_MC_END();
8132 return VINF_SUCCESS;
8133
8134 case IEMMODE_32BIT:
8135 IEM_MC_BEGIN(2, 0);
8136 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8137 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8138 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8139 IEM_MC_REF_EFLAGS(pEFlags);
8140 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
8141 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
8142 IEM_MC_ADVANCE_RIP();
8143 IEM_MC_END();
8144 return VINF_SUCCESS;
8145
8146 case IEMMODE_64BIT:
8147 IEM_MC_BEGIN(2, 0);
8148 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8149 IEM_MC_ARG(uint32_t *, pEFlags, 1);
8150 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8151 IEM_MC_REF_EFLAGS(pEFlags);
8152 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
8153 IEM_MC_ADVANCE_RIP();
8154 IEM_MC_END();
8155 return VINF_SUCCESS;
8156 }
8157 return VINF_SUCCESS;
8158}
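
/* Editor's note: reference semantics for a 32-bit 'inc' worker like the
   pfnNormalU32 member used above; a sketch with a made-up name, shown mainly
   to document that INC/DEC recompute OF/SF/ZF/AF/PF but never touch CF. */
#if 0
static void iemSketchIncU32(uint32_t *pu32Dst, uint32_t *pEFlags)
{
    uint32_t const uResult = *pu32Dst + 1;
    *pu32Dst = uResult;

    uint32_t fEfl = *pEFlags & ~(uint32_t)(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    if (!uResult)
        fEfl |= X86_EFL_ZF;
    if (uResult & UINT32_C(0x80000000))
        fEfl |= X86_EFL_SF;
    if (uResult == UINT32_C(0x80000000))
        fEfl |= X86_EFL_OF;                 /* only 0x7fffffff + 1 overflows */
    if (!(uResult & 0xf))
        fEfl |= X86_EFL_AF;                 /* carry out of the low nibble */
    uint8_t bPar = (uint8_t)uResult;        /* PF: even number of set bits in the low byte */
    bPar ^= bPar >> 4; bPar ^= bPar >> 2; bPar ^= bPar >> 1;
    if (!(bPar & 1))
        fEfl |= X86_EFL_PF;
    *pEFlags = fEfl;                        /* CF deliberately carried over unchanged */
}
#endif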
8159
8160
8161/** Opcode 0x40. */
8162FNIEMOP_DEF(iemOp_inc_eAX)
8163{
8164 /*
8165 * This is a REX prefix in 64-bit mode.
8166 */
8167 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8168 {
8169 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
8170 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
8171
8172 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8173 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8174 }
8175
8176 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
8177 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
8178}
8179
8180
8181/** Opcode 0x41. */
8182FNIEMOP_DEF(iemOp_inc_eCX)
8183{
8184 /*
8185 * This is a REX prefix in 64-bit mode.
8186 */
8187 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8188 {
8189 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
8190 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
8191 pVCpu->iem.s.uRexB = 1 << 3;
8192
8193 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8194 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8195 }
8196
8197 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
8198 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
8199}
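
/* Editor's note: how the uRexB/uRexReg/uRexIndex values set by the prefix
   handlers above get consumed; a one-liner sketch with an invented name.
   Each is either 0 or 1 << 3, so OR-ing one into a 3-bit ModR/M field
   yields the full 0..15 register index. */
#if 0
static uint8_t iemSketchExtendGRegIndex(uint8_t iReg3Bits, uint8_t uRexB)
{
    return iReg3Bits | uRexB; /* e.g. xCX (1) | REX.B (1 << 3) = r9 (9) */
}
#endif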
8200
8201
8202/** Opcode 0x42. */
8203FNIEMOP_DEF(iemOp_inc_eDX)
8204{
8205 /*
8206 * This is a REX prefix in 64-bit mode.
8207 */
8208 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8209 {
8210 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
8211 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
8212 pVCpu->iem.s.uRexIndex = 1 << 3;
8213
8214 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8215 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8216 }
8217
8218 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
8219 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
8220}
8221
8222
8223
8224/** Opcode 0x43. */
8225FNIEMOP_DEF(iemOp_inc_eBX)
8226{
8227 /*
8228 * This is a REX prefix in 64-bit mode.
8229 */
8230 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8231 {
8232 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
8233 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8234 pVCpu->iem.s.uRexB = 1 << 3;
8235 pVCpu->iem.s.uRexIndex = 1 << 3;
8236
8237 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8238 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8239 }
8240
8241 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
8242 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
8243}
8244
8245
8246/** Opcode 0x44. */
8247FNIEMOP_DEF(iemOp_inc_eSP)
8248{
8249 /*
8250 * This is a REX prefix in 64-bit mode.
8251 */
8252 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8253 {
8254 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
8255 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
8256 pVCpu->iem.s.uRexReg = 1 << 3;
8257
8258 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8259 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8260 }
8261
8262 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
8263 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
8264}
8265
8266
8267/** Opcode 0x45. */
8268FNIEMOP_DEF(iemOp_inc_eBP)
8269{
8270 /*
8271 * This is a REX prefix in 64-bit mode.
8272 */
8273 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8274 {
8275 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
8276 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
8277 pVCpu->iem.s.uRexReg = 1 << 3;
8278 pVCpu->iem.s.uRexB = 1 << 3;
8279
8280 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8281 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8282 }
8283
8284 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
8285 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
8286}
8287
8288
8289/** Opcode 0x46. */
8290FNIEMOP_DEF(iemOp_inc_eSI)
8291{
8292 /*
8293 * This is a REX prefix in 64-bit mode.
8294 */
8295 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8296 {
8297 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
8298 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
8299 pVCpu->iem.s.uRexReg = 1 << 3;
8300 pVCpu->iem.s.uRexIndex = 1 << 3;
8301
8302 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8303 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8304 }
8305
8306 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
8307 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
8308}
8309
8310
8311/** Opcode 0x47. */
8312FNIEMOP_DEF(iemOp_inc_eDI)
8313{
8314 /*
8315 * This is a REX prefix in 64-bit mode.
8316 */
8317 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8318 {
8319 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
8320 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
8321 pVCpu->iem.s.uRexReg = 1 << 3;
8322 pVCpu->iem.s.uRexB = 1 << 3;
8323 pVCpu->iem.s.uRexIndex = 1 << 3;
8324
8325 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8326 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8327 }
8328
8329 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
8330 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
8331}
8332
8333
8334/** Opcode 0x48. */
8335FNIEMOP_DEF(iemOp_dec_eAX)
8336{
8337 /*
8338 * This is a REX prefix in 64-bit mode.
8339 */
8340 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8341 {
8342 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
8343 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
8344 iemRecalEffOpSize(pVCpu);
8345
8346 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8347 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8348 }
8349
8350 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
8351 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
8352}
8353
8354
8355/** Opcode 0x49. */
8356FNIEMOP_DEF(iemOp_dec_eCX)
8357{
8358 /*
8359 * This is a REX prefix in 64-bit mode.
8360 */
8361 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8362 {
8363 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
8364 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8365 pVCpu->iem.s.uRexB = 1 << 3;
8366 iemRecalEffOpSize(pVCpu);
8367
8368 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8369 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8370 }
8371
8372 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
8373 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
8374}
8375
8376
8377/** Opcode 0x4a. */
8378FNIEMOP_DEF(iemOp_dec_eDX)
8379{
8380 /*
8381 * This is a REX prefix in 64-bit mode.
8382 */
8383 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8384 {
8385 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
8386 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8387 pVCpu->iem.s.uRexIndex = 1 << 3;
8388 iemRecalEffOpSize(pVCpu);
8389
8390 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8391 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8392 }
8393
8394 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
8395 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
8396}
8397
8398
8399/** Opcode 0x4b. */
8400FNIEMOP_DEF(iemOp_dec_eBX)
8401{
8402 /*
8403 * This is a REX prefix in 64-bit mode.
8404 */
8405 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8406 {
8407 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
8408 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8409 pVCpu->iem.s.uRexB = 1 << 3;
8410 pVCpu->iem.s.uRexIndex = 1 << 3;
8411 iemRecalEffOpSize(pVCpu);
8412
8413 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8414 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8415 }
8416
8417 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
8418 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
8419}
8420
8421
8422/** Opcode 0x4c. */
8423FNIEMOP_DEF(iemOp_dec_eSP)
8424{
8425 /*
8426 * This is a REX prefix in 64-bit mode.
8427 */
8428 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8429 {
8430 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
8431 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
8432 pVCpu->iem.s.uRexReg = 1 << 3;
8433 iemRecalEffOpSize(pVCpu);
8434
8435 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8436 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8437 }
8438
8439 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
8440 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
8441}
8442
8443
8444/** Opcode 0x4d. */
8445FNIEMOP_DEF(iemOp_dec_eBP)
8446{
8447 /*
8448 * This is a REX prefix in 64-bit mode.
8449 */
8450 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8451 {
8452 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
8453 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
8454 pVCpu->iem.s.uRexReg = 1 << 3;
8455 pVCpu->iem.s.uRexB = 1 << 3;
8456 iemRecalEffOpSize(pVCpu);
8457
8458 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8459 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8460 }
8461
8462 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
8463 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
8464}
8465
8466
8467/** Opcode 0x4e. */
8468FNIEMOP_DEF(iemOp_dec_eSI)
8469{
8470 /*
8471 * This is a REX prefix in 64-bit mode.
8472 */
8473 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8474 {
8475 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
8476 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8477 pVCpu->iem.s.uRexReg = 1 << 3;
8478 pVCpu->iem.s.uRexIndex = 1 << 3;
8479 iemRecalEffOpSize(pVCpu);
8480
8481 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8482 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8483 }
8484
8485 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
8486 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
8487}
8488
8489
8490/** Opcode 0x4f. */
8491FNIEMOP_DEF(iemOp_dec_eDI)
8492{
8493 /*
8494 * This is a REX prefix in 64-bit mode.
8495 */
8496 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8497 {
8498 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
8499 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
8500 pVCpu->iem.s.uRexReg = 1 << 3;
8501 pVCpu->iem.s.uRexB = 1 << 3;
8502 pVCpu->iem.s.uRexIndex = 1 << 3;
8503 iemRecalEffOpSize(pVCpu);
8504
8505 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8506 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8507 }
8508
8509 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
8510 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
8511}
8512
8513
8514/**
8515 * Common 'push register' helper.
8516 */
8517FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
8518{
8519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8520 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8521 {
8522 iReg |= pVCpu->iem.s.uRexB;
8523 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8524 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8525 }
8526
8527 switch (pVCpu->iem.s.enmEffOpSize)
8528 {
8529 case IEMMODE_16BIT:
8530 IEM_MC_BEGIN(0, 1);
8531 IEM_MC_LOCAL(uint16_t, u16Value);
8532 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
8533 IEM_MC_PUSH_U16(u16Value);
8534 IEM_MC_ADVANCE_RIP();
8535 IEM_MC_END();
8536 break;
8537
8538 case IEMMODE_32BIT:
8539 IEM_MC_BEGIN(0, 1);
8540 IEM_MC_LOCAL(uint32_t, u32Value);
8541 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
8542 IEM_MC_PUSH_U32(u32Value);
8543 IEM_MC_ADVANCE_RIP();
8544 IEM_MC_END();
8545 break;
8546
8547 case IEMMODE_64BIT:
8548 IEM_MC_BEGIN(0, 1);
8549 IEM_MC_LOCAL(uint64_t, u64Value);
8550 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
8551 IEM_MC_PUSH_U64(u64Value);
8552 IEM_MC_ADVANCE_RIP();
8553 IEM_MC_END();
8554 break;
8555 }
8556
8557 return VINF_SUCCESS;
8558}
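
/* Editor's note: the 64-bit operand size rule applied at the top of the
   helper above, restated as a standalone sketch (hypothetical name). */
#if 0
static IEMMODE iemSketchPushPopEffOpSize64(uint32_t fPrefixes)
{
    /* In 64-bit mode push/pop default to 64-bit; the 0x66 prefix selects
       16-bit and there is no way to encode a 32-bit push/pop. */
    return !(fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
}
#endif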
8559
8560
8561/** Opcode 0x50. */
8562FNIEMOP_DEF(iemOp_push_eAX)
8563{
8564 IEMOP_MNEMONIC(push_rAX, "push rAX");
8565 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
8566}
8567
8568
8569/** Opcode 0x51. */
8570FNIEMOP_DEF(iemOp_push_eCX)
8571{
8572 IEMOP_MNEMONIC(push_rCX, "push rCX");
8573 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
8574}
8575
8576
8577/** Opcode 0x52. */
8578FNIEMOP_DEF(iemOp_push_eDX)
8579{
8580 IEMOP_MNEMONIC(push_rDX, "push rDX");
8581 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
8582}
8583
8584
8585/** Opcode 0x53. */
8586FNIEMOP_DEF(iemOp_push_eBX)
8587{
8588 IEMOP_MNEMONIC(push_rBX, "push rBX");
8589 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
8590}
8591
8592
8593/** Opcode 0x54. */
8594FNIEMOP_DEF(iemOp_push_eSP)
8595{
8596 IEMOP_MNEMONIC(push_rSP, "push rSP");
8597 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
8598 {
8599 IEM_MC_BEGIN(0, 1);
8600 IEM_MC_LOCAL(uint16_t, u16Value);
8601 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
8602 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
8603 IEM_MC_PUSH_U16(u16Value);
8604 IEM_MC_ADVANCE_RIP();
8605 IEM_MC_END();
 return VINF_SUCCESS; /* 8086 handled; don't fall through and push SP a second time. */
8606 }
8607 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
8608}
8609
8610
8611/** Opcode 0x55. */
8612FNIEMOP_DEF(iemOp_push_eBP)
8613{
8614 IEMOP_MNEMONIC(push_rBP, "push rBP");
8615 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
8616}
8617
8618
8619/** Opcode 0x56. */
8620FNIEMOP_DEF(iemOp_push_eSI)
8621{
8622 IEMOP_MNEMONIC(push_rSI, "push rSI");
8623 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
8624}
8625
8626
8627/** Opcode 0x57. */
8628FNIEMOP_DEF(iemOp_push_eDI)
8629{
8630 IEMOP_MNEMONIC(push_rDI, "push rDI");
8631 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
8632}
8633
8634
8635/**
8636 * Common 'pop register' helper.
8637 */
8638FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
8639{
8640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8641 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8642 {
8643 iReg |= pVCpu->iem.s.uRexB;
8644 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8645 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8646 }
8647
8648 switch (pVCpu->iem.s.enmEffOpSize)
8649 {
8650 case IEMMODE_16BIT:
8651 IEM_MC_BEGIN(0, 1);
8652 IEM_MC_LOCAL(uint16_t *, pu16Dst);
8653 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
8654 IEM_MC_POP_U16(pu16Dst);
8655 IEM_MC_ADVANCE_RIP();
8656 IEM_MC_END();
8657 break;
8658
8659 case IEMMODE_32BIT:
8660 IEM_MC_BEGIN(0, 1);
8661 IEM_MC_LOCAL(uint32_t *, pu32Dst);
8662 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
8663 IEM_MC_POP_U32(pu32Dst);
8664 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
8665 IEM_MC_ADVANCE_RIP();
8666 IEM_MC_END();
8667 break;
8668
8669 case IEMMODE_64BIT:
8670 IEM_MC_BEGIN(0, 1);
8671 IEM_MC_LOCAL(uint64_t *, pu64Dst);
8672 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
8673 IEM_MC_POP_U64(pu64Dst);
8674 IEM_MC_ADVANCE_RIP();
8675 IEM_MC_END();
8676 break;
8677 }
8678
8679 return VINF_SUCCESS;
8680}
8681
8682
8683/** Opcode 0x58. */
8684FNIEMOP_DEF(iemOp_pop_eAX)
8685{
8686 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
8687 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
8688}
8689
8690
8691/** Opcode 0x59. */
8692FNIEMOP_DEF(iemOp_pop_eCX)
8693{
8694 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
8695 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
8696}
8697
8698
8699/** Opcode 0x5a. */
8700FNIEMOP_DEF(iemOp_pop_eDX)
8701{
8702 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
8703 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
8704}
8705
8706
8707/** Opcode 0x5b. */
8708FNIEMOP_DEF(iemOp_pop_eBX)
8709{
8710 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
8711 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
8712}
8713
8714
8715/** Opcode 0x5c. */
8716FNIEMOP_DEF(iemOp_pop_eSP)
8717{
8718 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
8719 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
8720 {
8721 if (pVCpu->iem.s.uRexB)
8722 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
8723 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
8724 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
8725 }
8726
8727 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
8728 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
8729 /** @todo add testcase for this instruction. */
8730 switch (pVCpu->iem.s.enmEffOpSize)
8731 {
8732 case IEMMODE_16BIT:
8733 IEM_MC_BEGIN(0, 1);
8734 IEM_MC_LOCAL(uint16_t, u16Dst);
8735 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
8736 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
8737 IEM_MC_ADVANCE_RIP();
8738 IEM_MC_END();
8739 break;
8740
8741 case IEMMODE_32BIT:
8742 IEM_MC_BEGIN(0, 1);
8743 IEM_MC_LOCAL(uint32_t, u32Dst);
8744 IEM_MC_POP_U32(&u32Dst);
8745 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
8746 IEM_MC_ADVANCE_RIP();
8747 IEM_MC_END();
8748 break;
8749
8750 case IEMMODE_64BIT:
8751 IEM_MC_BEGIN(0, 1);
8752 IEM_MC_LOCAL(uint64_t, u64Dst);
8753 IEM_MC_POP_U64(&u64Dst);
8754 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
8755 IEM_MC_ADVANCE_RIP();
8756 IEM_MC_END();
8757 break;
8758 }
8759
8760 return VINF_SUCCESS;
8761}
8762
8763
8764/** Opcode 0x5d. */
8765FNIEMOP_DEF(iemOp_pop_eBP)
8766{
8767 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
8768 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
8769}
8770
8771
8772/** Opcode 0x5e. */
8773FNIEMOP_DEF(iemOp_pop_eSI)
8774{
8775 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
8776 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
8777}
8778
8779
8780/** Opcode 0x5f. */
8781FNIEMOP_DEF(iemOp_pop_eDI)
8782{
8783 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
8784 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
8785}
8786
8787
8788/** Opcode 0x60. */
8789FNIEMOP_DEF(iemOp_pusha)
8790{
8791 IEMOP_MNEMONIC(pusha, "pusha");
8792 IEMOP_HLP_MIN_186();
8793 IEMOP_HLP_NO_64BIT();
8794 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8795 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
8796 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8797 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
8798}
8799
8800
8801/** Opcode 0x61. */
8802FNIEMOP_DEF(iemOp_popa)
8803{
8804 IEMOP_MNEMONIC(popa, "popa");
8805 IEMOP_HLP_MIN_186();
8806 IEMOP_HLP_NO_64BIT();
8807 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8808 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
8809 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
8810 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
8811}
8812
8813
8814/** Opcode 0x62. */
8815FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8816// IEMOP_HLP_MIN_186();
8817
8818
8819/** Opcode 0x63 - non-64-bit modes. */
8820FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
8821{
8822 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
8823 IEMOP_HLP_MIN_286();
8824 IEMOP_HLP_NO_REAL_OR_V86_MODE();
8825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8826
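 /* ARPL raises the RPL bits (1:0) of the destination selector to at least
 those of the source and sets ZF when an adjustment was made, clearing
 it otherwise; the actual compare lives in iemAImpl_arpl. */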
8827 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8828 {
8829 /* Register */
8830 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8831 IEM_MC_BEGIN(3, 0);
8832 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8833 IEM_MC_ARG(uint16_t, u16Src, 1);
8834 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8835
8836 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8837 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
8838 IEM_MC_REF_EFLAGS(pEFlags);
8839 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8840
8841 IEM_MC_ADVANCE_RIP();
8842 IEM_MC_END();
8843 }
8844 else
8845 {
8846 /* Memory */
8847 IEM_MC_BEGIN(3, 2);
8848 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8849 IEM_MC_ARG(uint16_t, u16Src, 1);
8850 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8852
8853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8854 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8855 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
8856 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8857 IEM_MC_FETCH_EFLAGS(EFlags);
8858 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8859
8860 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8861 IEM_MC_COMMIT_EFLAGS(EFlags);
8862 IEM_MC_ADVANCE_RIP();
8863 IEM_MC_END();
8864 }
8865 return VINF_SUCCESS;
8867}
8868
8869
8870/** Opcode 0x63.
8871 * @note This is a weird one. It works like a regular move instruction if
8872 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
8873 * @todo This definitely needs a testcase to verify the odd cases. */
8874FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
8875{
8876 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
8877
8878 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
8879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8880
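 /* E.g. 'movsxd rax, ecx' (REX.W 63 /r) sign-extends ECX into RAX; per the
 note above, without REX.W this would act as a plain 32-bit move. */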
8881 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8882 {
8883 /*
8884 * Register to register.
8885 */
8886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8887 IEM_MC_BEGIN(0, 1);
8888 IEM_MC_LOCAL(uint64_t, u64Value);
8889 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
8890 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8891 IEM_MC_ADVANCE_RIP();
8892 IEM_MC_END();
8893 }
8894 else
8895 {
8896 /*
8897 * We're loading a register from memory.
8898 */
8899 IEM_MC_BEGIN(0, 2);
8900 IEM_MC_LOCAL(uint64_t, u64Value);
8901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8904 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8905 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
8906 IEM_MC_ADVANCE_RIP();
8907 IEM_MC_END();
8908 }
8909 return VINF_SUCCESS;
8910}
8911
8912
8913/** Opcode 0x64. */
8914FNIEMOP_DEF(iemOp_seg_FS)
8915{
8916 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
8917 IEMOP_HLP_MIN_386();
8918
8919 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
8920 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
8921
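 /* Segment prefixes just record their effect and then decode the next
 opcode byte through the one-byte table; the recursion is bounded by the
 15 byte instruction length limit enforced by the opcode fetchers. */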
8922 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8923 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8924}
8925
8926
8927/** Opcode 0x65. */
8928FNIEMOP_DEF(iemOp_seg_GS)
8929{
8930 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
8931 IEMOP_HLP_MIN_386();
8932
8933 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
8934 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
8935
8936 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8937 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8938}
8939
8940
8941/** Opcode 0x66. */
8942FNIEMOP_DEF(iemOp_op_size)
8943{
8944 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
8945 IEMOP_HLP_MIN_386();
8946
8947 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
8948 iemRecalEffOpSize(pVCpu);
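 /* The effective operand size depends on the default size, this prefix
 and, in 64-bit mode, REX.W (which takes precedence over 66h), so it is
 recalculated centrally instead of being toggled in place. */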
8949
8950 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8951 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8952}
8953
8954
8955/** Opcode 0x67. */
8956FNIEMOP_DEF(iemOp_addr_size)
8957{
8958 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
8959 IEMOP_HLP_MIN_386();
8960
8961 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
8962 switch (pVCpu->iem.s.enmDefAddrMode)
8963 {
8964 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
8965 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
8966 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
8967 default: AssertFailed();
8968 }
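 /* Unlike the operand size there is no REX interaction here: 67h toggles
 between 16-bit and 32-bit addressing in legacy modes and selects
 32-bit addressing in 64-bit mode. */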
8969
8970 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8971 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8972}
8973
8974
8975/** Opcode 0x68. */
8976FNIEMOP_DEF(iemOp_push_Iz)
8977{
8978 IEMOP_MNEMONIC(push_Iz, "push Iz");
8979 IEMOP_HLP_MIN_186();
8980 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8981 switch (pVCpu->iem.s.enmEffOpSize)
8982 {
8983 case IEMMODE_16BIT:
8984 {
8985 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8987 IEM_MC_BEGIN(0,0);
8988 IEM_MC_PUSH_U16(u16Imm);
8989 IEM_MC_ADVANCE_RIP();
8990 IEM_MC_END();
8991 return VINF_SUCCESS;
8992 }
8993
8994 case IEMMODE_32BIT:
8995 {
8996 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8998 IEM_MC_BEGIN(0,0);
8999 IEM_MC_PUSH_U32(u32Imm);
9000 IEM_MC_ADVANCE_RIP();
9001 IEM_MC_END();
9002 return VINF_SUCCESS;
9003 }
9004
9005 case IEMMODE_64BIT:
9006 {
9007 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
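 /* There is no 64-bit immediate form; the 32-bit immediate is
 sign-extended to 64 bits, hence the S32_SX_U64 fetcher. */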
9008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9009 IEM_MC_BEGIN(0,0);
9010 IEM_MC_PUSH_U64(u64Imm);
9011 IEM_MC_ADVANCE_RIP();
9012 IEM_MC_END();
9013 return VINF_SUCCESS;
9014 }
9015
9016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9017 }
9018}
9019
9020
9021/** Opcode 0x69. */
9022FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
9023{
9024 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
9025 IEMOP_HLP_MIN_186();
9026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9027 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
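 /* Only CF and OF are defined after IMUL (set when the product overflows
 the destination size); SF, ZF, AF and PF are left undefined, hence the
 verification exemption above. */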
9028
9029 switch (pVCpu->iem.s.enmEffOpSize)
9030 {
9031 case IEMMODE_16BIT:
9032 {
9033 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9034 {
9035 /* register operand */
9036 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9038
9039 IEM_MC_BEGIN(3, 1);
9040 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9041 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
9042 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9043 IEM_MC_LOCAL(uint16_t, u16Tmp);
9044
9045 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9046 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9047 IEM_MC_REF_EFLAGS(pEFlags);
9048 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9049 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9050
9051 IEM_MC_ADVANCE_RIP();
9052 IEM_MC_END();
9053 }
9054 else
9055 {
9056 /* memory operand */
9057 IEM_MC_BEGIN(3, 2);
9058 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9059 IEM_MC_ARG(uint16_t, u16Src, 1);
9060 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9061 IEM_MC_LOCAL(uint16_t, u16Tmp);
9062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9063
9064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
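 /* The trailing '2' tells the effective address calculation that two
 immediate bytes follow the ModR/M encoding, which matters for
 RIP-relative addressing. */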
9065 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9066 IEM_MC_ASSIGN(u16Src, u16Imm);
9067 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9068 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9069 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9070 IEM_MC_REF_EFLAGS(pEFlags);
9071 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9072 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9073
9074 IEM_MC_ADVANCE_RIP();
9075 IEM_MC_END();
9076 }
9077 return VINF_SUCCESS;
9078 }
9079
9080 case IEMMODE_32BIT:
9081 {
9082 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9083 {
9084 /* register operand */
9085 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9087
9088 IEM_MC_BEGIN(3, 1);
9089 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9090 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
9091 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9092 IEM_MC_LOCAL(uint32_t, u32Tmp);
9093
9094 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9095 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9096 IEM_MC_REF_EFLAGS(pEFlags);
9097 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9098 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9099
9100 IEM_MC_ADVANCE_RIP();
9101 IEM_MC_END();
9102 }
9103 else
9104 {
9105 /* memory operand */
9106 IEM_MC_BEGIN(3, 2);
9107 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9108 IEM_MC_ARG(uint32_t, u32Src, 1);
9109 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9110 IEM_MC_LOCAL(uint32_t, u32Tmp);
9111 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9112
9113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9114 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9115 IEM_MC_ASSIGN(u32Src, u32Imm);
9116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9117 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9118 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9119 IEM_MC_REF_EFLAGS(pEFlags);
9120 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9121 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9122
9123 IEM_MC_ADVANCE_RIP();
9124 IEM_MC_END();
9125 }
9126 return VINF_SUCCESS;
9127 }
9128
9129 case IEMMODE_64BIT:
9130 {
9131 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9132 {
9133 /* register operand */
9134 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9136
9137 IEM_MC_BEGIN(3, 1);
9138 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9139 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
9140 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9141 IEM_MC_LOCAL(uint64_t, u64Tmp);
9142
9143 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9144 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9145 IEM_MC_REF_EFLAGS(pEFlags);
9146 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9147 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9148
9149 IEM_MC_ADVANCE_RIP();
9150 IEM_MC_END();
9151 }
9152 else
9153 {
9154 /* memory operand */
9155 IEM_MC_BEGIN(3, 2);
9156 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9157 IEM_MC_ARG(uint64_t, u64Src, 1);
9158 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9159 IEM_MC_LOCAL(uint64_t, u64Tmp);
9160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9161
9162 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9163 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9164 IEM_MC_ASSIGN(u64Src, u64Imm);
9165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9166 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9167 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9168 IEM_MC_REF_EFLAGS(pEFlags);
9169 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9170 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9171
9172 IEM_MC_ADVANCE_RIP();
9173 IEM_MC_END();
9174 }
9175 return VINF_SUCCESS;
9176 }
9177 }
9178 AssertFailedReturn(VERR_IEM_IPE_9);
9179}
9180
9181
9182/** Opcode 0x6a. */
9183FNIEMOP_DEF(iemOp_push_Ib)
9184{
9185 IEMOP_MNEMONIC(push_Ib, "push Ib");
9186 IEMOP_HLP_MIN_186();
9187 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9189 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9190
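 /* The byte immediate is sign-extended to the effective operand size;
 handing the int8_t to the PUSH_Uxx arguments does the widening via the
 ordinary C integer conversions. */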
9191 IEM_MC_BEGIN(0,0);
9192 switch (pVCpu->iem.s.enmEffOpSize)
9193 {
9194 case IEMMODE_16BIT:
9195 IEM_MC_PUSH_U16(i8Imm);
9196 break;
9197 case IEMMODE_32BIT:
9198 IEM_MC_PUSH_U32(i8Imm);
9199 break;
9200 case IEMMODE_64BIT:
9201 IEM_MC_PUSH_U64(i8Imm);
9202 break;
9203 }
9204 IEM_MC_ADVANCE_RIP();
9205 IEM_MC_END();
9206 return VINF_SUCCESS;
9207}
9208
9209
9210/** Opcode 0x6b. */
9211FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
9212{
9213 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
9214 IEMOP_HLP_MIN_186();
9215 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9216 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
9217
9218 switch (pVCpu->iem.s.enmEffOpSize)
9219 {
9220 case IEMMODE_16BIT:
9221 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9222 {
9223 /* register operand */
9224 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9226
9227 IEM_MC_BEGIN(3, 1);
9228 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9229 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
9230 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9231 IEM_MC_LOCAL(uint16_t, u16Tmp);
9232
9233 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9234 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9235 IEM_MC_REF_EFLAGS(pEFlags);
9236 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9237 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9238
9239 IEM_MC_ADVANCE_RIP();
9240 IEM_MC_END();
9241 }
9242 else
9243 {
9244 /* memory operand */
9245 IEM_MC_BEGIN(3, 2);
9246 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9247 IEM_MC_ARG(uint16_t, u16Src, 1);
9248 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9249 IEM_MC_LOCAL(uint16_t, u16Tmp);
9250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9251
9252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9253 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
9254 IEM_MC_ASSIGN(u16Src, u16Imm);
9255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9256 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9257 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
9258 IEM_MC_REF_EFLAGS(pEFlags);
9259 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
9260 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
9261
9262 IEM_MC_ADVANCE_RIP();
9263 IEM_MC_END();
9264 }
9265 return VINF_SUCCESS;
9266
9267 case IEMMODE_32BIT:
9268 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9269 {
9270 /* register operand */
9271 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9273
9274 IEM_MC_BEGIN(3, 1);
9275 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9276 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
9277 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9278 IEM_MC_LOCAL(uint32_t, u32Tmp);
9279
9280 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9281 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9282 IEM_MC_REF_EFLAGS(pEFlags);
9283 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9284 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9285
9286 IEM_MC_ADVANCE_RIP();
9287 IEM_MC_END();
9288 }
9289 else
9290 {
9291 /* memory operand */
9292 IEM_MC_BEGIN(3, 2);
9293 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9294 IEM_MC_ARG(uint32_t, u32Src, 1);
9295 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9296 IEM_MC_LOCAL(uint32_t, u32Tmp);
9297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9298
9299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9300 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
9301 IEM_MC_ASSIGN(u32Src, u32Imm);
9302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9303 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9304 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
9305 IEM_MC_REF_EFLAGS(pEFlags);
9306 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
9307 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
9308
9309 IEM_MC_ADVANCE_RIP();
9310 IEM_MC_END();
9311 }
9312 return VINF_SUCCESS;
9313
9314 case IEMMODE_64BIT:
9315 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9316 {
9317 /* register operand */
9318 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9320
9321 IEM_MC_BEGIN(3, 1);
9322 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9323 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
9324 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9325 IEM_MC_LOCAL(uint64_t, u64Tmp);
9326
9327 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9328 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9329 IEM_MC_REF_EFLAGS(pEFlags);
9330 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9331 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9332
9333 IEM_MC_ADVANCE_RIP();
9334 IEM_MC_END();
9335 }
9336 else
9337 {
9338 /* memory operand */
9339 IEM_MC_BEGIN(3, 2);
9340 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9341 IEM_MC_ARG(uint64_t, u64Src, 1);
9342 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9343 IEM_MC_LOCAL(uint64_t, u64Tmp);
9344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9345
9346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9347 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
9348 IEM_MC_ASSIGN(u64Src, u64Imm);
9349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9350 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9351 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
9352 IEM_MC_REF_EFLAGS(pEFlags);
9353 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
9354 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
9355
9356 IEM_MC_ADVANCE_RIP();
9357 IEM_MC_END();
9358 }
9359 return VINF_SUCCESS;
9360 }
9361 AssertFailedReturn(VERR_IEM_IPE_8);
9362}
9363
9364
9365/** Opcode 0x6c. */
9366FNIEMOP_DEF(iemOp_insb_Yb_DX)
9367{
9368 IEMOP_HLP_MIN_186();
9369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
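 /* String I/O needs I/O permission and segment checks and can make
 partial progress, so both the plain and REP forms are deferred to C
 helpers selected by the effective address size. */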
9370 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9371 {
9372 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
9373 switch (pVCpu->iem.s.enmEffAddrMode)
9374 {
9375 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
9376 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
9377 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
9378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9379 }
9380 }
9381 else
9382 {
9383 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
9384 switch (pVCpu->iem.s.enmEffAddrMode)
9385 {
9386 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
9387 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
9388 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
9389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9390 }
9391 }
9392}
9393
9394
9395/** Opcode 0x6d. */
9396FNIEMOP_DEF(iemOp_inswd_Yv_DX)
9397{
9398 IEMOP_HLP_MIN_186();
9399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9400 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9401 {
9402 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
9403 switch (pVCpu->iem.s.enmEffOpSize)
9404 {
9405 case IEMMODE_16BIT:
9406 switch (pVCpu->iem.s.enmEffAddrMode)
9407 {
9408 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
9409 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
9410 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
9411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9412 }
9413 break;
9414 case IEMMODE_64BIT:
9415 case IEMMODE_32BIT:
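 /* No 64-bit I/O port accesses exist; a 64-bit operand size falls back
 to 32-bit transfers, hence the shared op32 helpers. */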
9416 switch (pVCpu->iem.s.enmEffAddrMode)
9417 {
9418 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
9419 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
9420 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
9421 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9422 }
9423 break;
9424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9425 }
9426 }
9427 else
9428 {
9429 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
9430 switch (pVCpu->iem.s.enmEffOpSize)
9431 {
9432 case IEMMODE_16BIT:
9433 switch (pVCpu->iem.s.enmEffAddrMode)
9434 {
9435 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
9436 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
9437 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
9438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9439 }
9440 break;
9441 case IEMMODE_64BIT:
9442 case IEMMODE_32BIT:
9443 switch (pVCpu->iem.s.enmEffAddrMode)
9444 {
9445 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
9446 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
9447 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
9448 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9449 }
9450 break;
9451 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9452 }
9453 }
9454}
9455
9456
9457/** Opcode 0x6e. */
9458FNIEMOP_DEF(iemOp_outsb_Yb_DX)
9459{
9460 IEMOP_HLP_MIN_186();
9461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9462 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
9463 {
9464 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
9465 switch (pVCpu->iem.s.enmEffAddrMode)
9466 {
9467 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9468 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9469 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9471 }
9472 }
9473 else
9474 {
9475 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
9476 switch (pVCpu->iem.s.enmEffAddrMode)
9477 {
9478 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
9479 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
9480 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
9481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9482 }
9483 }
9484}
9485
9486
9487/** Opcode 0x6f. */
9488FNIEMOP_DEF(iemOp_outswd_Yv_DX)
9489{
9490 IEMOP_HLP_MIN_186();
9491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9492 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
9493 {
9494 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
9495 switch (pVCpu->iem.s.enmEffOpSize)
9496 {
9497 case IEMMODE_16BIT:
9498 switch (pVCpu->iem.s.enmEffAddrMode)
9499 {
9500 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9501 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9502 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9504 }
9505 break;
9506 case IEMMODE_64BIT:
9507 case IEMMODE_32BIT:
9508 switch (pVCpu->iem.s.enmEffAddrMode)
9509 {
9510 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9511 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9512 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9514 }
9515 break;
9516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9517 }
9518 }
9519 else
9520 {
9521 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
9522 switch (pVCpu->iem.s.enmEffOpSize)
9523 {
9524 case IEMMODE_16BIT:
9525 switch (pVCpu->iem.s.enmEffAddrMode)
9526 {
9527 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
9528 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
9529 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
9530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9531 }
9532 break;
9533 case IEMMODE_64BIT:
9534 case IEMMODE_32BIT:
9535 switch (pVCpu->iem.s.enmEffAddrMode)
9536 {
9537 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
9538 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
9539 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
9540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9541 }
9542 break;
9543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9544 }
9545 }
9546}
9547
9548
9549/** Opcode 0x70. */
9550FNIEMOP_DEF(iemOp_jo_Jb)
9551{
9552 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
9553 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9555 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9556
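 /* All the Jcc Jb forms follow this pattern: test the relevant EFLAGS
 bit(s) and either take the relative jump or just advance RIP. The
 64-bit default operand size ensures the full RIP is updated. */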
9557 IEM_MC_BEGIN(0, 0);
9558 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9559 IEM_MC_REL_JMP_S8(i8Imm);
9560 } IEM_MC_ELSE() {
9561 IEM_MC_ADVANCE_RIP();
9562 } IEM_MC_ENDIF();
9563 IEM_MC_END();
9564 return VINF_SUCCESS;
9565}
9566
9567
9568/** Opcode 0x71. */
9569FNIEMOP_DEF(iemOp_jno_Jb)
9570{
9571 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
9572 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9574 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9575
9576 IEM_MC_BEGIN(0, 0);
9577 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
9578 IEM_MC_ADVANCE_RIP();
9579 } IEM_MC_ELSE() {
9580 IEM_MC_REL_JMP_S8(i8Imm);
9581 } IEM_MC_ENDIF();
9582 IEM_MC_END();
9583 return VINF_SUCCESS;
9584}
9585

9586/** Opcode 0x72. */
9587FNIEMOP_DEF(iemOp_jc_Jb)
9588{
9589 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
9590 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9592 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9593
9594 IEM_MC_BEGIN(0, 0);
9595 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9596 IEM_MC_REL_JMP_S8(i8Imm);
9597 } IEM_MC_ELSE() {
9598 IEM_MC_ADVANCE_RIP();
9599 } IEM_MC_ENDIF();
9600 IEM_MC_END();
9601 return VINF_SUCCESS;
9602}
9603
9604
9605/** Opcode 0x73. */
9606FNIEMOP_DEF(iemOp_jnc_Jb)
9607{
9608 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
9609 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9611 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9612
9613 IEM_MC_BEGIN(0, 0);
9614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9615 IEM_MC_ADVANCE_RIP();
9616 } IEM_MC_ELSE() {
9617 IEM_MC_REL_JMP_S8(i8Imm);
9618 } IEM_MC_ENDIF();
9619 IEM_MC_END();
9620 return VINF_SUCCESS;
9621}
9622
9623
9624/** Opcode 0x74. */
9625FNIEMOP_DEF(iemOp_je_Jb)
9626{
9627 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
9628 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9630 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9631
9632 IEM_MC_BEGIN(0, 0);
9633 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9634 IEM_MC_REL_JMP_S8(i8Imm);
9635 } IEM_MC_ELSE() {
9636 IEM_MC_ADVANCE_RIP();
9637 } IEM_MC_ENDIF();
9638 IEM_MC_END();
9639 return VINF_SUCCESS;
9640}
9641
9642
9643/** Opcode 0x75. */
9644FNIEMOP_DEF(iemOp_jne_Jb)
9645{
9646 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
9647 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9649 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9650
9651 IEM_MC_BEGIN(0, 0);
9652 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
9653 IEM_MC_ADVANCE_RIP();
9654 } IEM_MC_ELSE() {
9655 IEM_MC_REL_JMP_S8(i8Imm);
9656 } IEM_MC_ENDIF();
9657 IEM_MC_END();
9658 return VINF_SUCCESS;
9659}
9660
9661
9662/** Opcode 0x76. */
9663FNIEMOP_DEF(iemOp_jbe_Jb)
9664{
9665 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
9666 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9668 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9669
9670 IEM_MC_BEGIN(0, 0);
9671 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9672 IEM_MC_REL_JMP_S8(i8Imm);
9673 } IEM_MC_ELSE() {
9674 IEM_MC_ADVANCE_RIP();
9675 } IEM_MC_ENDIF();
9676 IEM_MC_END();
9677 return VINF_SUCCESS;
9678}
9679
9680
9681/** Opcode 0x77. */
9682FNIEMOP_DEF(iemOp_jnbe_Jb)
9683{
9684 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
9685 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9687 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9688
9689 IEM_MC_BEGIN(0, 0);
9690 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
9691 IEM_MC_ADVANCE_RIP();
9692 } IEM_MC_ELSE() {
9693 IEM_MC_REL_JMP_S8(i8Imm);
9694 } IEM_MC_ENDIF();
9695 IEM_MC_END();
9696 return VINF_SUCCESS;
9697}
9698
9699
9700/** Opcode 0x78. */
9701FNIEMOP_DEF(iemOp_js_Jb)
9702{
9703 IEMOP_MNEMONIC(js_Jb, "js Jb");
9704 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9706 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9707
9708 IEM_MC_BEGIN(0, 0);
9709 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9710 IEM_MC_REL_JMP_S8(i8Imm);
9711 } IEM_MC_ELSE() {
9712 IEM_MC_ADVANCE_RIP();
9713 } IEM_MC_ENDIF();
9714 IEM_MC_END();
9715 return VINF_SUCCESS;
9716}
9717
9718
9719/** Opcode 0x79. */
9720FNIEMOP_DEF(iemOp_jns_Jb)
9721{
9722 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
9723 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9725 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9726
9727 IEM_MC_BEGIN(0, 0);
9728 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9729 IEM_MC_ADVANCE_RIP();
9730 } IEM_MC_ELSE() {
9731 IEM_MC_REL_JMP_S8(i8Imm);
9732 } IEM_MC_ENDIF();
9733 IEM_MC_END();
9734 return VINF_SUCCESS;
9735}
9736
9737
9738/** Opcode 0x7a. */
9739FNIEMOP_DEF(iemOp_jp_Jb)
9740{
9741 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
9742 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9744 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9745
9746 IEM_MC_BEGIN(0, 0);
9747 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9748 IEM_MC_REL_JMP_S8(i8Imm);
9749 } IEM_MC_ELSE() {
9750 IEM_MC_ADVANCE_RIP();
9751 } IEM_MC_ENDIF();
9752 IEM_MC_END();
9753 return VINF_SUCCESS;
9754}
9755
9756
9757/** Opcode 0x7b. */
9758FNIEMOP_DEF(iemOp_jnp_Jb)
9759{
9760 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
9761 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9763 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9764
9765 IEM_MC_BEGIN(0, 0);
9766 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9767 IEM_MC_ADVANCE_RIP();
9768 } IEM_MC_ELSE() {
9769 IEM_MC_REL_JMP_S8(i8Imm);
9770 } IEM_MC_ENDIF();
9771 IEM_MC_END();
9772 return VINF_SUCCESS;
9773}
9774
9775
9776/** Opcode 0x7c. */
9777FNIEMOP_DEF(iemOp_jl_Jb)
9778{
9779 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
9780 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9782 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9783
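 /* Signed 'less' is SF != OF: the sign flag disagrees with the overflow
 flag exactly when the (overflow corrected) subtraction result is
 negative. */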
9784 IEM_MC_BEGIN(0, 0);
9785 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9786 IEM_MC_REL_JMP_S8(i8Imm);
9787 } IEM_MC_ELSE() {
9788 IEM_MC_ADVANCE_RIP();
9789 } IEM_MC_ENDIF();
9790 IEM_MC_END();
9791 return VINF_SUCCESS;
9792}
9793
9794
9795/** Opcode 0x7d. */
9796FNIEMOP_DEF(iemOp_jnl_Jb)
9797{
9798 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
9799 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9801 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9802
9803 IEM_MC_BEGIN(0, 0);
9804 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
9805 IEM_MC_ADVANCE_RIP();
9806 } IEM_MC_ELSE() {
9807 IEM_MC_REL_JMP_S8(i8Imm);
9808 } IEM_MC_ENDIF();
9809 IEM_MC_END();
9810 return VINF_SUCCESS;
9811}
9812
9813
9814/** Opcode 0x7e. */
9815FNIEMOP_DEF(iemOp_jle_Jb)
9816{
9817 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
9818 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9820 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9821
9822 IEM_MC_BEGIN(0, 0);
9823 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9824 IEM_MC_REL_JMP_S8(i8Imm);
9825 } IEM_MC_ELSE() {
9826 IEM_MC_ADVANCE_RIP();
9827 } IEM_MC_ENDIF();
9828 IEM_MC_END();
9829 return VINF_SUCCESS;
9830}
9831
9832
9833/** Opcode 0x7f. */
9834FNIEMOP_DEF(iemOp_jnle_Jb)
9835{
9836 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
9837 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9839 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9840
9841 IEM_MC_BEGIN(0, 0);
9842 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
9843 IEM_MC_ADVANCE_RIP();
9844 } IEM_MC_ELSE() {
9845 IEM_MC_REL_JMP_S8(i8Imm);
9846 } IEM_MC_ENDIF();
9847 IEM_MC_END();
9848 return VINF_SUCCESS;
9849}
9850
9851
9852/** Opcode 0x80. */
9853FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
9854{
9855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9856 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9857 {
9858 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
9859 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
9860 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
9861 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
9862 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
9863 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
9864 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
9865 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
9866 }
9867 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
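 /* The ModR/M reg field selects the operation; CMP (/7) has no locked
 variant, which the memory path below uses both to pick read-only
 access and to reject the LOCK prefix. */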
9868
9869 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9870 {
9871 /* register target */
9872 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9874 IEM_MC_BEGIN(3, 0);
9875 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9876 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9877 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9878
9879 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9880 IEM_MC_REF_EFLAGS(pEFlags);
9881 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9882
9883 IEM_MC_ADVANCE_RIP();
9884 IEM_MC_END();
9885 }
9886 else
9887 {
9888 /* memory target */
9889 uint32_t fAccess;
9890 if (pImpl->pfnLockedU8)
9891 fAccess = IEM_ACCESS_DATA_RW;
9892 else /* CMP */
9893 fAccess = IEM_ACCESS_DATA_R;
9894 IEM_MC_BEGIN(3, 2);
9895 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9896 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9897 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9898
9899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9900 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9901 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
9902 if (pImpl->pfnLockedU8)
9903 IEMOP_HLP_DONE_DECODING();
9904 else
9905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9906
9907 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9908 IEM_MC_FETCH_EFLAGS(EFlags);
9909 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9910 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
9911 else
9912 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
9913
9914 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
9915 IEM_MC_COMMIT_EFLAGS(EFlags);
9916 IEM_MC_ADVANCE_RIP();
9917 IEM_MC_END();
9918 }
9919 return VINF_SUCCESS;
9920}
9921
9922
9923/** Opcode 0x81. */
9924FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
9925{
9926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9927 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9928 {
9929 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
9930 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
9931 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
9932 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
9933 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
9934 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
9935 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
9936 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
9937 }
9938 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
9939
9940 switch (pVCpu->iem.s.enmEffOpSize)
9941 {
9942 case IEMMODE_16BIT:
9943 {
9944 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9945 {
9946 /* register target */
9947 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9949 IEM_MC_BEGIN(3, 0);
9950 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9951 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
9952 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9953
9954 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9955 IEM_MC_REF_EFLAGS(pEFlags);
9956 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9957
9958 IEM_MC_ADVANCE_RIP();
9959 IEM_MC_END();
9960 }
9961 else
9962 {
9963 /* memory target */
9964 uint32_t fAccess;
9965 if (pImpl->pfnLockedU16)
9966 fAccess = IEM_ACCESS_DATA_RW;
9967 else /* CMP, TEST */
9968 fAccess = IEM_ACCESS_DATA_R;
9969 IEM_MC_BEGIN(3, 2);
9970 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9971 IEM_MC_ARG(uint16_t, u16Src, 1);
9972 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
9973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9974
9975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9976 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9977 IEM_MC_ASSIGN(u16Src, u16Imm);
9978 if (pImpl->pfnLockedU16)
9979 IEMOP_HLP_DONE_DECODING();
9980 else
9981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9982 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9983 IEM_MC_FETCH_EFLAGS(EFlags);
9984 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9985 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
9986 else
9987 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
9988
9989 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
9990 IEM_MC_COMMIT_EFLAGS(EFlags);
9991 IEM_MC_ADVANCE_RIP();
9992 IEM_MC_END();
9993 }
9994 break;
9995 }
9996
9997 case IEMMODE_32BIT:
9998 {
9999 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10000 {
10001 /* register target */
10002 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10004 IEM_MC_BEGIN(3, 0);
10005 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10006 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
10007 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10008
10009 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10010 IEM_MC_REF_EFLAGS(pEFlags);
10011 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10012 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10013
10014 IEM_MC_ADVANCE_RIP();
10015 IEM_MC_END();
10016 }
10017 else
10018 {
10019 /* memory target */
10020 uint32_t fAccess;
10021 if (pImpl->pfnLockedU32)
10022 fAccess = IEM_ACCESS_DATA_RW;
10023 else /* CMP, TEST */
10024 fAccess = IEM_ACCESS_DATA_R;
10025 IEM_MC_BEGIN(3, 2);
10026 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10027 IEM_MC_ARG(uint32_t, u32Src, 1);
10028 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10030
10031 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10032 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10033 IEM_MC_ASSIGN(u32Src, u32Imm);
10034 if (pImpl->pfnLockedU32)
10035 IEMOP_HLP_DONE_DECODING();
10036 else
10037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10038 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10039 IEM_MC_FETCH_EFLAGS(EFlags);
10040 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10041 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10042 else
10043 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10044
10045 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10046 IEM_MC_COMMIT_EFLAGS(EFlags);
10047 IEM_MC_ADVANCE_RIP();
10048 IEM_MC_END();
10049 }
10050 break;
10051 }
10052
10053 case IEMMODE_64BIT:
10054 {
10055 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10056 {
10057 /* register target */
10058 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10060 IEM_MC_BEGIN(3, 0);
10061 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10062 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
10063 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10064
10065 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10066 IEM_MC_REF_EFLAGS(pEFlags);
10067 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10068
10069 IEM_MC_ADVANCE_RIP();
10070 IEM_MC_END();
10071 }
10072 else
10073 {
10074 /* memory target */
10075 uint32_t fAccess;
10076 if (pImpl->pfnLockedU64)
10077 fAccess = IEM_ACCESS_DATA_RW;
10078 else /* CMP */
10079 fAccess = IEM_ACCESS_DATA_R;
10080 IEM_MC_BEGIN(3, 2);
10081 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10082 IEM_MC_ARG(uint64_t, u64Src, 1);
10083 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10085
10086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10087 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10088 if (pImpl->pfnLockedU64)
10089 IEMOP_HLP_DONE_DECODING();
10090 else
10091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10092 IEM_MC_ASSIGN(u64Src, u64Imm);
10093 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10094 IEM_MC_FETCH_EFLAGS(EFlags);
10095 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10096 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10097 else
10098 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10099
10100 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10101 IEM_MC_COMMIT_EFLAGS(EFlags);
10102 IEM_MC_ADVANCE_RIP();
10103 IEM_MC_END();
10104 }
10105 break;
10106 }
10107 }
10108 return VINF_SUCCESS;
10109}
10110
10111
10112/** Opcode 0x82. */
10113FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
10114{
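 /* Undocumented alias of opcode 0x80 (Grp1 Eb,Ib); raises #UD in 64-bit mode. */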
10115 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
10116 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
10117}
10118
10119
10120/** Opcode 0x83. */
10121FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
10122{
10123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10124 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10125 {
10126 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
10127 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
10128 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
10129 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
10130 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
10131 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
10132 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
10133 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
10134 }
10135 /* Note! The OR, AND and XOR instructions seem to be present on CPUs prior
10136 to the 386 even though they are absent from the Intel reference
10137 manuals and some 3rd party opcode listings. */
10138 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
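 /* The byte immediate is sign-extended to the effective operand size
 before the operation; e.g. 83 /0 with an imm8 of FFh adds 0FFFFh to a
 16-bit destination. */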
10139
10140 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10141 {
10142 /*
10143 * Register target
10144 */
10145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10146 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10147 switch (pVCpu->iem.s.enmEffOpSize)
10148 {
10149 case IEMMODE_16BIT:
10150 {
10151 IEM_MC_BEGIN(3, 0);
10152 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10153 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
10154 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10155
10156 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10157 IEM_MC_REF_EFLAGS(pEFlags);
10158 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10159
10160 IEM_MC_ADVANCE_RIP();
10161 IEM_MC_END();
10162 break;
10163 }
10164
10165 case IEMMODE_32BIT:
10166 {
10167 IEM_MC_BEGIN(3, 0);
10168 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10169 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
10170 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10171
10172 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10173 IEM_MC_REF_EFLAGS(pEFlags);
10174 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10175 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10176
10177 IEM_MC_ADVANCE_RIP();
10178 IEM_MC_END();
10179 break;
10180 }
10181
10182 case IEMMODE_64BIT:
10183 {
10184 IEM_MC_BEGIN(3, 0);
10185 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10186 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
10187 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10188
10189 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10190 IEM_MC_REF_EFLAGS(pEFlags);
10191 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10192
10193 IEM_MC_ADVANCE_RIP();
10194 IEM_MC_END();
10195 break;
10196 }
10197 }
10198 }
10199 else
10200 {
10201 /*
10202 * Memory target.
10203 */
10204 uint32_t fAccess;
10205 if (pImpl->pfnLockedU16)
10206 fAccess = IEM_ACCESS_DATA_RW;
10207 else /* CMP */
10208 fAccess = IEM_ACCESS_DATA_R;
10209
10210 switch (pVCpu->iem.s.enmEffOpSize)
10211 {
10212 case IEMMODE_16BIT:
10213 {
10214 IEM_MC_BEGIN(3, 2);
10215 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10216 IEM_MC_ARG(uint16_t, u16Src, 1);
10217 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10219
10220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10221 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10222 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
10223 if (pImpl->pfnLockedU16)
10224 IEMOP_HLP_DONE_DECODING();
10225 else
10226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10227 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10228 IEM_MC_FETCH_EFLAGS(EFlags);
10229 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10230 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
10231 else
10232 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
10233
10234 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
10235 IEM_MC_COMMIT_EFLAGS(EFlags);
10236 IEM_MC_ADVANCE_RIP();
10237 IEM_MC_END();
10238 break;
10239 }
10240
10241 case IEMMODE_32BIT:
10242 {
10243 IEM_MC_BEGIN(3, 2);
10244 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10245 IEM_MC_ARG(uint32_t, u32Src, 1);
10246 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10248
10249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10250 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10251 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
10252 if (pImpl->pfnLockedU32)
10253 IEMOP_HLP_DONE_DECODING();
10254 else
10255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10256 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10257 IEM_MC_FETCH_EFLAGS(EFlags);
10258 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10259 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
10260 else
10261 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
10262
10263 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
10264 IEM_MC_COMMIT_EFLAGS(EFlags);
10265 IEM_MC_ADVANCE_RIP();
10266 IEM_MC_END();
10267 break;
10268 }
10269
10270 case IEMMODE_64BIT:
10271 {
10272 IEM_MC_BEGIN(3, 2);
10273 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10274 IEM_MC_ARG(uint64_t, u64Src, 1);
10275 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10277
10278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10279 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10280 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
10281 if (pImpl->pfnLockedU64)
10282 IEMOP_HLP_DONE_DECODING();
10283 else
10284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10285 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10286 IEM_MC_FETCH_EFLAGS(EFlags);
10287 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10288 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
10289 else
10290 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
10291
10292 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
10293 IEM_MC_COMMIT_EFLAGS(EFlags);
10294 IEM_MC_ADVANCE_RIP();
10295 IEM_MC_END();
10296 break;
10297 }
10298 }
10299 }
10300 return VINF_SUCCESS;
10301}
10302
10303
10304/** Opcode 0x84. */
10305FNIEMOP_DEF(iemOp_test_Eb_Gb)
10306{
10307 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
10308 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10309 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
10310}
10311
10312
10313/** Opcode 0x85. */
10314FNIEMOP_DEF(iemOp_test_Ev_Gv)
10315{
10316 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
10317 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10318 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
10319}
10320
10321
10322/** Opcode 0x86. */
10323FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
10324{
10325 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10326 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
10327
10328 /*
10329 * If rm is denoting a register, no more instruction bytes.
10330 */
10331 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10332 {
10333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10334
10335 IEM_MC_BEGIN(0, 2);
10336 IEM_MC_LOCAL(uint8_t, uTmp1);
10337 IEM_MC_LOCAL(uint8_t, uTmp2);
10338
10339 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10340 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10341 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10342 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10343
10344 IEM_MC_ADVANCE_RIP();
10345 IEM_MC_END();
10346 }
10347 else
10348 {
10349 /*
10350 * We're accessing memory.
10351 */
10352/** @todo the register must be committed separately! */
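 /* XCHG with a memory operand has implicit LOCK semantics on real
 hardware, which is why no LOCK prefix check is done here. */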
10353 IEM_MC_BEGIN(2, 2);
10354 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
10355 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
10356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10357
10358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10359 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10360 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10361 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
10362 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
10363
10364 IEM_MC_ADVANCE_RIP();
10365 IEM_MC_END();
10366 }
10367 return VINF_SUCCESS;
10368}
10369
10370
10371/** Opcode 0x87. */
10372FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
10373{
10374 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
10375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10376
10377 /*
10378 * If rm is denoting a register, no more instruction bytes.
10379 */
10380 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10381 {
10382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10383
10384 switch (pVCpu->iem.s.enmEffOpSize)
10385 {
10386 case IEMMODE_16BIT:
10387 IEM_MC_BEGIN(0, 2);
10388 IEM_MC_LOCAL(uint16_t, uTmp1);
10389 IEM_MC_LOCAL(uint16_t, uTmp2);
10390
10391 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10392 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10393 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10394 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10395
10396 IEM_MC_ADVANCE_RIP();
10397 IEM_MC_END();
10398 return VINF_SUCCESS;
10399
10400 case IEMMODE_32BIT:
10401 IEM_MC_BEGIN(0, 2);
10402 IEM_MC_LOCAL(uint32_t, uTmp1);
10403 IEM_MC_LOCAL(uint32_t, uTmp2);
10404
10405 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10406 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10407 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10408 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10409
10410 IEM_MC_ADVANCE_RIP();
10411 IEM_MC_END();
10412 return VINF_SUCCESS;
10413
10414 case IEMMODE_64BIT:
10415 IEM_MC_BEGIN(0, 2);
10416 IEM_MC_LOCAL(uint64_t, uTmp1);
10417 IEM_MC_LOCAL(uint64_t, uTmp2);
10418
10419 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10420 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10421 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
10422 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
10423
10424 IEM_MC_ADVANCE_RIP();
10425 IEM_MC_END();
10426 return VINF_SUCCESS;
10427
10428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10429 }
10430 }
10431 else
10432 {
10433 /*
10434 * We're accessing memory.
10435 */
10436 switch (pVCpu->iem.s.enmEffOpSize)
10437 {
10438/** @todo the register must be committed separately! */
10439 case IEMMODE_16BIT:
10440 IEM_MC_BEGIN(2, 2);
10441 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
10442 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
10443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10444
10445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10446 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10447 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10448 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
10449 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
10450
10451 IEM_MC_ADVANCE_RIP();
10452 IEM_MC_END();
10453 return VINF_SUCCESS;
10454
10455 case IEMMODE_32BIT:
10456 IEM_MC_BEGIN(2, 2);
10457 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
10458 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
10459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10460
10461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10462 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10463 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10464 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
10465 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
10466
10467 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
10468 IEM_MC_ADVANCE_RIP();
10469 IEM_MC_END();
10470 return VINF_SUCCESS;
10471
10472 case IEMMODE_64BIT:
10473 IEM_MC_BEGIN(2, 2);
10474 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
10475 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
10476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10477
10478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10479 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10480 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10481 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
10482 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
10483
10484 IEM_MC_ADVANCE_RIP();
10485 IEM_MC_END();
10486 return VINF_SUCCESS;
10487
10488 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10489 }
10490 }
10491}
10492
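/*
 * Illustrative note (hypothetical encodings): XCHG with a memory operand is
 * implicitly locked on x86, with or without an explicit LOCK prefix, which
 * is why the memory path above maps the operand for read/write and
 * exchanges it in place:
 *     87 03          xchg [ebx], eax      ; implicitly atomic
 *     f0 87 03       lock xchg [ebx], eax ; same behaviour, redundant prefix
 */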
10493
10494/** Opcode 0x88. */
10495FNIEMOP_DEF(iemOp_mov_Eb_Gb)
10496{
10497 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
10498
10499 uint8_t bRm;
10500 IEM_OPCODE_GET_NEXT_U8(&bRm);
10501
10502 /*
10503 * If rm is denoting a register, no more instruction bytes.
10504 */
10505 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10506 {
10507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10508 IEM_MC_BEGIN(0, 1);
10509 IEM_MC_LOCAL(uint8_t, u8Value);
10510 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10511 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
10512 IEM_MC_ADVANCE_RIP();
10513 IEM_MC_END();
10514 }
10515 else
10516 {
10517 /*
10518 * We're writing a register to memory.
10519 */
10520 IEM_MC_BEGIN(0, 2);
10521 IEM_MC_LOCAL(uint8_t, u8Value);
10522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10525 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10526 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
10527 IEM_MC_ADVANCE_RIP();
10528 IEM_MC_END();
10529 }
10530 return VINF_SUCCESS;
10531
10532}
10533
10534
10535/** Opcode 0x89. */
10536FNIEMOP_DEF(iemOp_mov_Ev_Gv)
10537{
10538 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
10539
10540 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10541
10542 /*
10543 * If rm is denoting a register, no more instruction bytes.
10544 */
10545 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10546 {
10547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10548 switch (pVCpu->iem.s.enmEffOpSize)
10549 {
10550 case IEMMODE_16BIT:
10551 IEM_MC_BEGIN(0, 1);
10552 IEM_MC_LOCAL(uint16_t, u16Value);
10553 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10554 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10555 IEM_MC_ADVANCE_RIP();
10556 IEM_MC_END();
10557 break;
10558
10559 case IEMMODE_32BIT:
10560 IEM_MC_BEGIN(0, 1);
10561 IEM_MC_LOCAL(uint32_t, u32Value);
10562 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10563 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10564 IEM_MC_ADVANCE_RIP();
10565 IEM_MC_END();
10566 break;
10567
10568 case IEMMODE_64BIT:
10569 IEM_MC_BEGIN(0, 1);
10570 IEM_MC_LOCAL(uint64_t, u64Value);
10571 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10572 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10573 IEM_MC_ADVANCE_RIP();
10574 IEM_MC_END();
10575 break;
10576 }
10577 }
10578 else
10579 {
10580 /*
10581 * We're writing a register to memory.
10582 */
10583 switch (pVCpu->iem.s.enmEffOpSize)
10584 {
10585 case IEMMODE_16BIT:
10586 IEM_MC_BEGIN(0, 2);
10587 IEM_MC_LOCAL(uint16_t, u16Value);
10588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10591 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10592 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10593 IEM_MC_ADVANCE_RIP();
10594 IEM_MC_END();
10595 break;
10596
10597 case IEMMODE_32BIT:
10598 IEM_MC_BEGIN(0, 2);
10599 IEM_MC_LOCAL(uint32_t, u32Value);
10600 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10603 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10604 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
10605 IEM_MC_ADVANCE_RIP();
10606 IEM_MC_END();
10607 break;
10608
10609 case IEMMODE_64BIT:
10610 IEM_MC_BEGIN(0, 2);
10611 IEM_MC_LOCAL(uint64_t, u64Value);
10612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10615 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
10616 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
10617 IEM_MC_ADVANCE_RIP();
10618 IEM_MC_END();
10619 break;
10620 }
10621 }
10622 return VINF_SUCCESS;
10623}
10624
10625
10626/** Opcode 0x8a. */
10627FNIEMOP_DEF(iemOp_mov_Gb_Eb)
10628{
10629 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
10630
10631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10632
10633 /*
10634 * If rm is denoting a register, no more instruction bytes.
10635 */
10636 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10637 {
10638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10639 IEM_MC_BEGIN(0, 1);
10640 IEM_MC_LOCAL(uint8_t, u8Value);
10641 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10642 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10643 IEM_MC_ADVANCE_RIP();
10644 IEM_MC_END();
10645 }
10646 else
10647 {
10648 /*
10649 * We're loading a register from memory.
10650 */
10651 IEM_MC_BEGIN(0, 2);
10652 IEM_MC_LOCAL(uint8_t, u8Value);
10653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10656 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10657 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
10658 IEM_MC_ADVANCE_RIP();
10659 IEM_MC_END();
10660 }
10661 return VINF_SUCCESS;
10662}
10663
10664
10665/** Opcode 0x8b. */
10666FNIEMOP_DEF(iemOp_mov_Gv_Ev)
10667{
10668 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
10669
10670 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10671
10672 /*
10673 * If rm is denoting a register, no more instruction bytes.
10674 */
10675 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10676 {
10677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10678 switch (pVCpu->iem.s.enmEffOpSize)
10679 {
10680 case IEMMODE_16BIT:
10681 IEM_MC_BEGIN(0, 1);
10682 IEM_MC_LOCAL(uint16_t, u16Value);
10683 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10684 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10685 IEM_MC_ADVANCE_RIP();
10686 IEM_MC_END();
10687 break;
10688
10689 case IEMMODE_32BIT:
10690 IEM_MC_BEGIN(0, 1);
10691 IEM_MC_LOCAL(uint32_t, u32Value);
10692 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10693 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10694 IEM_MC_ADVANCE_RIP();
10695 IEM_MC_END();
10696 break;
10697
10698 case IEMMODE_64BIT:
10699 IEM_MC_BEGIN(0, 1);
10700 IEM_MC_LOCAL(uint64_t, u64Value);
10701 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10702 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10703 IEM_MC_ADVANCE_RIP();
10704 IEM_MC_END();
10705 break;
10706 }
10707 }
10708 else
10709 {
10710 /*
10711 * We're loading a register from memory.
10712 */
10713 switch (pVCpu->iem.s.enmEffOpSize)
10714 {
10715 case IEMMODE_16BIT:
10716 IEM_MC_BEGIN(0, 2);
10717 IEM_MC_LOCAL(uint16_t, u16Value);
10718 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10719 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10721 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10722 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
10723 IEM_MC_ADVANCE_RIP();
10724 IEM_MC_END();
10725 break;
10726
10727 case IEMMODE_32BIT:
10728 IEM_MC_BEGIN(0, 2);
10729 IEM_MC_LOCAL(uint32_t, u32Value);
10730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10733 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10734 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
10735 IEM_MC_ADVANCE_RIP();
10736 IEM_MC_END();
10737 break;
10738
10739 case IEMMODE_64BIT:
10740 IEM_MC_BEGIN(0, 2);
10741 IEM_MC_LOCAL(uint64_t, u64Value);
10742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10745 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10746 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
10747 IEM_MC_ADVANCE_RIP();
10748 IEM_MC_END();
10749 break;
10750 }
10751 }
10752 return VINF_SUCCESS;
10753}
10754
10755
10756/** Opcode 0x63. */
10757FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
10758{
10759 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
10760 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
10761 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10762 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
10763 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
10764}
10765
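/*
 * Illustrative encodings (hypothetical) for the three faces of opcode 0x63
 * dispatched above:
 *     63 c3          arpl bx, ax          ; 16/32-bit modes
 *     48 63 c3       movsxd rax, ebx      ; 64-bit mode with REX.W: sign-extend
 *     63 c3          mov eax, ebx         ; 64-bit mode without REX.W: plain move
 */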
10766
10767/** Opcode 0x8c. */
10768FNIEMOP_DEF(iemOp_mov_Ev_Sw)
10769{
10770 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
10771
10772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10773
10774 /*
10775 * Check that the destination register exists. The REX.R prefix is ignored.
10776 */
10777 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10778 if (iSegReg > X86_SREG_GS)
10779 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10780
10781 /*
10782 * If rm is denoting a register, no more instruction bytes.
10783 * In that case, the operand size is respected and the upper bits are
10784 * cleared (starting with some Pentium).
10785 */
10786 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10787 {
10788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10789 switch (pVCpu->iem.s.enmEffOpSize)
10790 {
10791 case IEMMODE_16BIT:
10792 IEM_MC_BEGIN(0, 1);
10793 IEM_MC_LOCAL(uint16_t, u16Value);
10794 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10795 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
10796 IEM_MC_ADVANCE_RIP();
10797 IEM_MC_END();
10798 break;
10799
10800 case IEMMODE_32BIT:
10801 IEM_MC_BEGIN(0, 1);
10802 IEM_MC_LOCAL(uint32_t, u32Value);
10803 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
10804 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
10805 IEM_MC_ADVANCE_RIP();
10806 IEM_MC_END();
10807 break;
10808
10809 case IEMMODE_64BIT:
10810 IEM_MC_BEGIN(0, 1);
10811 IEM_MC_LOCAL(uint64_t, u64Value);
10812 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
10813 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
10814 IEM_MC_ADVANCE_RIP();
10815 IEM_MC_END();
10816 break;
10817 }
10818 }
10819 else
10820 {
10821 /*
10822 * We're saving the register to memory. The access is word sized
10823 * regardless of operand size prefixes.
10824 */
10825#if 0 /* not necessary */
10826 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10827#endif
10828 IEM_MC_BEGIN(0, 2);
10829 IEM_MC_LOCAL(uint16_t, u16Value);
10830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10833 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
10834 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
10835 IEM_MC_ADVANCE_RIP();
10836 IEM_MC_END();
10837 }
10838 return VINF_SUCCESS;
10839}
10840
10841
10842
10843
10844/** Opcode 0x8d. */
10845FNIEMOP_DEF(iemOp_lea_Gv_M)
10846{
10847 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
10848 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10849 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10850 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
10851
10852 switch (pVCpu->iem.s.enmEffOpSize)
10853 {
10854 case IEMMODE_16BIT:
10855 IEM_MC_BEGIN(0, 2);
10856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10857 IEM_MC_LOCAL(uint16_t, u16Cast);
10858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10860 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
10861 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
10862 IEM_MC_ADVANCE_RIP();
10863 IEM_MC_END();
10864 return VINF_SUCCESS;
10865
10866 case IEMMODE_32BIT:
10867 IEM_MC_BEGIN(0, 2);
10868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10869 IEM_MC_LOCAL(uint32_t, u32Cast);
10870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10872 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
10873 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
10874 IEM_MC_ADVANCE_RIP();
10875 IEM_MC_END();
10876 return VINF_SUCCESS;
10877
10878 case IEMMODE_64BIT:
10879 IEM_MC_BEGIN(0, 1);
10880 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10881 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10883 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
10884 IEM_MC_ADVANCE_RIP();
10885 IEM_MC_END();
10886 return VINF_SUCCESS;
10887 }
10888 AssertFailedReturn(VERR_IEM_IPE_7);
10889}
10890
10891
10892/** Opcode 0x8e. */
10893FNIEMOP_DEF(iemOp_mov_Sw_Ev)
10894{
10895 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
10896
10897 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10898
10899 /*
10900 * The practical operand size is 16-bit.
10901 */
10902#if 0 /* not necessary */
10903 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
10904#endif
10905
10906 /*
10907 * Check that the destination register exists and can be used with this
10908 * instruction. The REX.R prefix is ignored.
10909 */
10910 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
10911 if ( iSegReg == X86_SREG_CS
10912 || iSegReg > X86_SREG_GS)
10913 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10914
10915 /*
10916 * If rm is denoting a register, no more instruction bytes.
10917 */
10918 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10919 {
10920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10921 IEM_MC_BEGIN(2, 0);
10922 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10923 IEM_MC_ARG(uint16_t, u16Value, 1);
10924 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10925 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10926 IEM_MC_END();
10927 }
10928 else
10929 {
10930 /*
10931 * We're loading the register from memory. The access is word sized
10932 * regardless of operand size prefixes.
10933 */
10934 IEM_MC_BEGIN(2, 1);
10935 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
10936 IEM_MC_ARG(uint16_t, u16Value, 1);
10937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10940 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10941 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
10942 IEM_MC_END();
10943 }
10944 return VINF_SUCCESS;
10945}
10946
10947
10948/** Opcode 0x8f /0. */
10949FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
10950{
10951 /* This bugger is rather annoying as it requires rSP to be updated before
10952 doing the effective address calculations. Will eventually require a
10953 split between the R/M+SIB decoding and the effective address
10954 calculation - which is something that is required for any attempt at
10955 reusing this code for a recompiler. It may also be good to have if we
10956 need to delay #UD exception caused by invalid lock prefixes.
10957
10958 For now, we'll do a mostly safe interpreter-only implementation here. */
10959 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
10960 * now until tests show it's checked. */
10961 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
10962
10963 /* Register access is relatively easy and can share code. */
10964 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10965 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10966
10967 /*
10968 * Memory target.
10969 *
10970 * Intel says that RSP is incremented before it's used in any effective
10971 * address calculations. This means some serious extra annoyance here since
10972 * we decode and calculate the effective address in one step and like to
10973 * delay committing registers till everything is done.
10974 *
10975 * So, we'll decode and calculate the effective address twice. This will
10976 * require some recoding if turned into a recompiler.
10977 */
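/* Worked example (hypothetical values): for a 64-bit 'pop qword [rsp+8]'
 * with RSP=0x1000 on entry, the value is popped from 0x1000, RSP becomes
 * 0x1008, and only then is the destination address computed as
 * 0x1008 + 8 = 0x1010. The trailing 2/4/8 argument below feeds that RSP
 * adjustment into the effective address calculation. */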
10978 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
10979
10980#ifndef TST_IEM_CHECK_MC
10981 /* Calc effective address with modified ESP. */
10982/** @todo testcase */
10983 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
10984 RTGCPTR GCPtrEff;
10985 VBOXSTRICTRC rcStrict;
10986 switch (pVCpu->iem.s.enmEffOpSize)
10987 {
10988 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
10989 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
10990 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
10991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10992 }
10993 if (rcStrict != VINF_SUCCESS)
10994 return rcStrict;
10995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10996
10997 /* Perform the operation - this should be CImpl. */
10998 RTUINT64U TmpRsp;
10999 TmpRsp.u = pCtx->rsp;
11000 switch (pVCpu->iem.s.enmEffOpSize)
11001 {
11002 case IEMMODE_16BIT:
11003 {
11004 uint16_t u16Value;
11005 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
11006 if (rcStrict == VINF_SUCCESS)
11007 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
11008 break;
11009 }
11010
11011 case IEMMODE_32BIT:
11012 {
11013 uint32_t u32Value;
11014 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
11015 if (rcStrict == VINF_SUCCESS)
11016 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
11017 break;
11018 }
11019
11020 case IEMMODE_64BIT:
11021 {
11022 uint64_t u64Value;
11023 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
11024 if (rcStrict == VINF_SUCCESS)
11025 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
11026 break;
11027 }
11028
11029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11030 }
11031 if (rcStrict == VINF_SUCCESS)
11032 {
11033 pCtx->rsp = TmpRsp.u;
11034 iemRegUpdateRipAndClearRF(pVCpu);
11035 }
11036 return rcStrict;
11037
11038#else
11039 return VERR_IEM_IPE_2;
11040#endif
11041}
11042
11043
11044/** Opcode 0x8f. */
11045FNIEMOP_DEF(iemOp_Grp1A)
11046{
11047 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11048 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
11049 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
11050
11051 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
11052 /** @todo XOP decoding. */
11053 IEMOP_MNEMONIC(xop_amd, "3-byte-xop");
11054 return IEMOP_RAISE_INVALID_OPCODE();
11055}
11056
11057
11058/**
11059 * Common 'xchg reg,rAX' helper.
11060 */
11061FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
11062{
11063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11064
11065 iReg |= pVCpu->iem.s.uRexB;
11066 switch (pVCpu->iem.s.enmEffOpSize)
11067 {
11068 case IEMMODE_16BIT:
11069 IEM_MC_BEGIN(0, 2);
11070 IEM_MC_LOCAL(uint16_t, u16Tmp1);
11071 IEM_MC_LOCAL(uint16_t, u16Tmp2);
11072 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
11073 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
11074 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
11075 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
11076 IEM_MC_ADVANCE_RIP();
11077 IEM_MC_END();
11078 return VINF_SUCCESS;
11079
11080 case IEMMODE_32BIT:
11081 IEM_MC_BEGIN(0, 2);
11082 IEM_MC_LOCAL(uint32_t, u32Tmp1);
11083 IEM_MC_LOCAL(uint32_t, u32Tmp2);
11084 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
11085 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
11086 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
11087 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
11088 IEM_MC_ADVANCE_RIP();
11089 IEM_MC_END();
11090 return VINF_SUCCESS;
11091
11092 case IEMMODE_64BIT:
11093 IEM_MC_BEGIN(0, 2);
11094 IEM_MC_LOCAL(uint64_t, u64Tmp1);
11095 IEM_MC_LOCAL(uint64_t, u64Tmp2);
11096 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
11097 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
11098 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
11099 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
11100 IEM_MC_ADVANCE_RIP();
11101 IEM_MC_END();
11102 return VINF_SUCCESS;
11103
11104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11105 }
11106}
11107
11108
11109/** Opcode 0x90. */
11110FNIEMOP_DEF(iemOp_nop)
11111{
11112 /* R8/R8D and RAX/EAX can be exchanged. */
11113 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
11114 {
11115 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
11116 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
11117 }
11118
11119 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11120 IEMOP_MNEMONIC(pause, "pause");
11121 else
11122 IEMOP_MNEMONIC(nop, "nop");
11123 IEM_MC_BEGIN(0, 0);
11124 IEM_MC_ADVANCE_RIP();
11125 IEM_MC_END();
11126 return VINF_SUCCESS;
11127}
11128
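/*
 * Illustrative encodings (hypothetical): plain 90 is 'nop' (architecturally
 * 'xchg eAX,eAX'), f3 90 is 'pause', and 41 90 (REX.B) must really exchange:
 *     41 90          xchg r8, rax
 * hence the REX.B check above before treating 0x90 as a no-op.
 */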
11129
11130/** Opcode 0x91. */
11131FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
11132{
11133 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
11134 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
11135}
11136
11137
11138/** Opcode 0x92. */
11139FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
11140{
11141 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
11142 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
11143}
11144
11145
11146/** Opcode 0x93. */
11147FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
11148{
11149 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
11150 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
11151}
11152
11153
11154/** Opcode 0x94. */
11155FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
11156{
11157 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
11158 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
11159}
11160
11161
11162/** Opcode 0x95. */
11163FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
11164{
11165 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
11166 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
11167}
11168
11169
11170/** Opcode 0x96. */
11171FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
11172{
11173 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
11174 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
11175}
11176
11177
11178/** Opcode 0x97. */
11179FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
11180{
11181 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
11182 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
11183}
11184
11185
11186/** Opcode 0x98. */
11187FNIEMOP_DEF(iemOp_cbw)
11188{
11189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11190 switch (pVCpu->iem.s.enmEffOpSize)
11191 {
11192 case IEMMODE_16BIT:
11193 IEMOP_MNEMONIC(cbw, "cbw");
11194 IEM_MC_BEGIN(0, 1);
11195 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
11196 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
11197 } IEM_MC_ELSE() {
11198 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
11199 } IEM_MC_ENDIF();
11200 IEM_MC_ADVANCE_RIP();
11201 IEM_MC_END();
11202 return VINF_SUCCESS;
11203
11204 case IEMMODE_32BIT:
11205 IEMOP_MNEMONIC(cwde, "cwde");
11206 IEM_MC_BEGIN(0, 1);
11207 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11208 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
11209 } IEM_MC_ELSE() {
11210 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
11211 } IEM_MC_ENDIF();
11212 IEM_MC_ADVANCE_RIP();
11213 IEM_MC_END();
11214 return VINF_SUCCESS;
11215
11216 case IEMMODE_64BIT:
11217 IEMOP_MNEMONIC(cdqe, "cdqe");
11218 IEM_MC_BEGIN(0, 1);
11219 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11220 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
11221 } IEM_MC_ELSE() {
11222 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
11223 } IEM_MC_ENDIF();
11224 IEM_MC_ADVANCE_RIP();
11225 IEM_MC_END();
11226 return VINF_SUCCESS;
11227
11228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11229 }
11230}
11231
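/*
 * Worked example (hypothetical values) of the mask trick above: 'cbw' with
 * AL=0x80 (bit 7 set) ORs in 0xff00 giving AX=0xff80, while AL=0x7f ANDs
 * with 0x00ff giving AX=0x007f - plain sign extension without a separate
 * sign-extend operation.
 */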
11232
11233/** Opcode 0x99. */
11234FNIEMOP_DEF(iemOp_cwd)
11235{
11236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11237 switch (pVCpu->iem.s.enmEffOpSize)
11238 {
11239 case IEMMODE_16BIT:
11240 IEMOP_MNEMONIC(cwd, "cwd");
11241 IEM_MC_BEGIN(0, 1);
11242 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
11243 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
11244 } IEM_MC_ELSE() {
11245 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
11246 } IEM_MC_ENDIF();
11247 IEM_MC_ADVANCE_RIP();
11248 IEM_MC_END();
11249 return VINF_SUCCESS;
11250
11251 case IEMMODE_32BIT:
11252 IEMOP_MNEMONIC(cdq, "cdq");
11253 IEM_MC_BEGIN(0, 1);
11254 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
11255 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
11256 } IEM_MC_ELSE() {
11257 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
11258 } IEM_MC_ENDIF();
11259 IEM_MC_ADVANCE_RIP();
11260 IEM_MC_END();
11261 return VINF_SUCCESS;
11262
11263 case IEMMODE_64BIT:
11264 IEMOP_MNEMONIC(cqo, "cqo");
11265 IEM_MC_BEGIN(0, 1);
11266 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
11267 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
11268 } IEM_MC_ELSE() {
11269 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
11270 } IEM_MC_ENDIF();
11271 IEM_MC_ADVANCE_RIP();
11272 IEM_MC_END();
11273 return VINF_SUCCESS;
11274
11275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11276 }
11277}
11278
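/*
 * Worked example (hypothetical values): 'cwd' with AX=0x8000 sets
 * DX=0xffff, while AX=0x7fff sets DX=0; likewise 'cdq' and 'cqo' fill rDX
 * with copies of the rAX sign bit, which is all the sign-bit test above
 * does.
 */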
11279
11280/** Opcode 0x9a. */
11281FNIEMOP_DEF(iemOp_call_Ap)
11282{
11283 IEMOP_MNEMONIC(call_Ap, "call Ap");
11284 IEMOP_HLP_NO_64BIT();
11285
11286 /* Decode the far pointer address and pass it on to the far call C implementation. */
11287 uint32_t offSeg;
11288 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11289 IEM_OPCODE_GET_NEXT_U32(&offSeg);
11290 else
11291 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
11292 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
11293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11294 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
11295}
11296
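/*
 * Illustrative encoding (hypothetical): with a 32-bit operand size the far
 * pointer is laid out offset-first, selector-last, matching the two fetches
 * above:
 *     9a 78 56 34 12 cd ab   call 0xabcd:0x12345678
 */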
11297
11298/** Opcode 0x9b. (aka fwait) */
11299FNIEMOP_DEF(iemOp_wait)
11300{
11301 IEMOP_MNEMONIC(wait, "wait");
11302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11303
11304 IEM_MC_BEGIN(0, 0);
11305 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11306 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11307 IEM_MC_ADVANCE_RIP();
11308 IEM_MC_END();
11309 return VINF_SUCCESS;
11310}
11311
11312
11313/** Opcode 0x9c. */
11314FNIEMOP_DEF(iemOp_pushf_Fv)
11315{
11316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11317 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11318 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
11319}
11320
11321
11322/** Opcode 0x9d. */
11323FNIEMOP_DEF(iemOp_popf_Fv)
11324{
11325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11326 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11327 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
11328}
11329
11330
11331/** Opcode 0x9e. */
11332FNIEMOP_DEF(iemOp_sahf)
11333{
11334 IEMOP_MNEMONIC(sahf, "sahf");
11335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11336 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11337 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11338 return IEMOP_RAISE_INVALID_OPCODE();
11339 IEM_MC_BEGIN(0, 2);
11340 IEM_MC_LOCAL(uint32_t, u32Flags);
11341 IEM_MC_LOCAL(uint32_t, EFlags);
11342 IEM_MC_FETCH_EFLAGS(EFlags);
11343 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
11344 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11345 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
11346 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
11347 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
11348 IEM_MC_COMMIT_EFLAGS(EFlags);
11349 IEM_MC_ADVANCE_RIP();
11350 IEM_MC_END();
11351 return VINF_SUCCESS;
11352}
11353
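/*
 * Worked example (hypothetical value): the mask used above is
 * SF|ZF|AF|PF|CF = 0x80+0x40+0x10+0x04+0x01 = 0xd5, so 'sahf' with AH=0xff
 * yields an EFLAGS low byte of (0xff & 0xd5) | 0x02 = 0xd7; bit 1
 * (X86_EFL_1) is always forced to one.
 */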
11354
11355/** Opcode 0x9f. */
11356FNIEMOP_DEF(iemOp_lahf)
11357{
11358 IEMOP_MNEMONIC(lahf, "lahf");
11359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11360 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
11361 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
11362 return IEMOP_RAISE_INVALID_OPCODE();
11363 IEM_MC_BEGIN(0, 1);
11364 IEM_MC_LOCAL(uint8_t, u8Flags);
11365 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
11366 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
11367 IEM_MC_ADVANCE_RIP();
11368 IEM_MC_END();
11369 return VINF_SUCCESS;
11370}
11371
11372
11373/**
11374 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
11375 * iemOp_mov_Ov_rAX to fetch the moffsXX part of the opcode and fend off lock
11376 * prefixes. Will return on failures.
11377 * @param a_GCPtrMemOff The variable to store the offset in.
11378 */
11379#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
11380 do \
11381 { \
11382 switch (pVCpu->iem.s.enmEffAddrMode) \
11383 { \
11384 case IEMMODE_16BIT: \
11385 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
11386 break; \
11387 case IEMMODE_32BIT: \
11388 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
11389 break; \
11390 case IEMMODE_64BIT: \
11391 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
11392 break; \
11393 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11394 } \
11395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11396 } while (0)
11397
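/*
 * Illustrative encoding (hypothetical): with a 32-bit effective address
 * size the moffs is a 4-byte absolute offset, so
 *     a1 78 56 34 12     mov eax, [0x12345678]
 * while in 64-bit mode the default is an 8-byte offset and a 0x67 prefix
 * shrinks it to 4 bytes, as reflected by the switch above.
 */
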
11398/** Opcode 0xa0. */
11399FNIEMOP_DEF(iemOp_mov_Al_Ob)
11400{
11401 /*
11402 * Get the offset and fend off lock prefixes.
11403 */
11404 RTGCPTR GCPtrMemOff;
11405 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11406
11407 /*
11408 * Fetch AL.
11409 */
11410 IEM_MC_BEGIN(0, 1);
11411 IEM_MC_LOCAL(uint8_t, u8Tmp);
11412 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11413 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
11414 IEM_MC_ADVANCE_RIP();
11415 IEM_MC_END();
11416 return VINF_SUCCESS;
11417}
11418
11419
11420/** Opcode 0xa1. */
11421FNIEMOP_DEF(iemOp_mov_rAX_Ov)
11422{
11423 /*
11424 * Get the offset and fend off lock prefixes.
11425 */
11426 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
11427 RTGCPTR GCPtrMemOff;
11428 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11429
11430 /*
11431 * Fetch rAX.
11432 */
11433 switch (pVCpu->iem.s.enmEffOpSize)
11434 {
11435 case IEMMODE_16BIT:
11436 IEM_MC_BEGIN(0, 1);
11437 IEM_MC_LOCAL(uint16_t, u16Tmp);
11438 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11439 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
11440 IEM_MC_ADVANCE_RIP();
11441 IEM_MC_END();
11442 return VINF_SUCCESS;
11443
11444 case IEMMODE_32BIT:
11445 IEM_MC_BEGIN(0, 1);
11446 IEM_MC_LOCAL(uint32_t, u32Tmp);
11447 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11448 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
11449 IEM_MC_ADVANCE_RIP();
11450 IEM_MC_END();
11451 return VINF_SUCCESS;
11452
11453 case IEMMODE_64BIT:
11454 IEM_MC_BEGIN(0, 1);
11455 IEM_MC_LOCAL(uint64_t, u64Tmp);
11456 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
11457 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
11458 IEM_MC_ADVANCE_RIP();
11459 IEM_MC_END();
11460 return VINF_SUCCESS;
11461
11462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11463 }
11464}
11465
11466
11467/** Opcode 0xa2. */
11468FNIEMOP_DEF(iemOp_mov_Ob_AL)
11469{
11470 /*
11471 * Get the offset and fend off lock prefixes.
11472 */
11473 RTGCPTR GCPtrMemOff;
11474 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11475
11476 /*
11477 * Store AL.
11478 */
11479 IEM_MC_BEGIN(0, 1);
11480 IEM_MC_LOCAL(uint8_t, u8Tmp);
11481 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
11482 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
11483 IEM_MC_ADVANCE_RIP();
11484 IEM_MC_END();
11485 return VINF_SUCCESS;
11486}
11487
11488
11489/** Opcode 0xa3. */
11490FNIEMOP_DEF(iemOp_mov_Ov_rAX)
11491{
11492 /*
11493 * Get the offset and fend off lock prefixes.
11494 */
11495 RTGCPTR GCPtrMemOff;
11496 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
11497
11498 /*
11499 * Store rAX.
11500 */
11501 switch (pVCpu->iem.s.enmEffOpSize)
11502 {
11503 case IEMMODE_16BIT:
11504 IEM_MC_BEGIN(0, 1);
11505 IEM_MC_LOCAL(uint16_t, u16Tmp);
11506 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
11507 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
11508 IEM_MC_ADVANCE_RIP();
11509 IEM_MC_END();
11510 return VINF_SUCCESS;
11511
11512 case IEMMODE_32BIT:
11513 IEM_MC_BEGIN(0, 1);
11514 IEM_MC_LOCAL(uint32_t, u32Tmp);
11515 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
11516 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
11517 IEM_MC_ADVANCE_RIP();
11518 IEM_MC_END();
11519 return VINF_SUCCESS;
11520
11521 case IEMMODE_64BIT:
11522 IEM_MC_BEGIN(0, 1);
11523 IEM_MC_LOCAL(uint64_t, u64Tmp);
11524 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
11525 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
11526 IEM_MC_ADVANCE_RIP();
11527 IEM_MC_END();
11528 return VINF_SUCCESS;
11529
11530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11531 }
11532}
11533
11534/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
11535#define IEM_MOVS_CASE(ValBits, AddrBits) \
11536 IEM_MC_BEGIN(0, 2); \
11537 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11538 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11539 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11540 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
11541 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11542 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11543 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11544 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11545 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11546 } IEM_MC_ELSE() { \
11547 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11548 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11549 } IEM_MC_ENDIF(); \
11550 IEM_MC_ADVANCE_RIP(); \
11551 IEM_MC_END();
11552
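/*
 * Illustrative example (hypothetical state): 'movsd' (32-bit operand) with
 * DF=0, DS:ESI=0x100, ES:EDI=0x200 copies 4 bytes from 0x100 to 0x200 and
 * leaves ESI=0x104, EDI=0x204; with DF=1 both would end up 4 lower. Only
 * the source segment can be overridden (pVCpu->iem.s.iEffSeg), the
 * destination is always ES, matching the macro above.
 */
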
11553/** Opcode 0xa4. */
11554FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
11555{
11556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11557
11558 /*
11559 * Use the C implementation if a repeat prefix is encountered.
11560 */
11561 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11562 {
11563 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
11564 switch (pVCpu->iem.s.enmEffAddrMode)
11565 {
11566 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
11567 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
11568 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
11569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11570 }
11571 }
11572 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
11573
11574 /*
11575 * Sharing case implementation with movs[wdq] below.
11576 */
11577 switch (pVCpu->iem.s.enmEffAddrMode)
11578 {
11579 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
11580 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
11581 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
11582 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11583 }
11584 return VINF_SUCCESS;
11585}
11586
11587
11588/** Opcode 0xa5. */
11589FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
11590{
11591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11592
11593 /*
11594 * Use the C implementation if a repeat prefix is encountered.
11595 */
11596 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11597 {
11598 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
11599 switch (pVCpu->iem.s.enmEffOpSize)
11600 {
11601 case IEMMODE_16BIT:
11602 switch (pVCpu->iem.s.enmEffAddrMode)
11603 {
11604 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
11605 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
11606 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
11607 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11608 }
11609 break;
11610 case IEMMODE_32BIT:
11611 switch (pVCpu->iem.s.enmEffAddrMode)
11612 {
11613 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
11614 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
11615 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
11616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11617 }
11618 case IEMMODE_64BIT:
11619 switch (pVCpu->iem.s.enmEffAddrMode)
11620 {
11621 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
11622 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
11623 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
11624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11625 }
11626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11627 }
11628 }
11629 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
11630
11631 /*
11632 * Annoying double switch here.
11633 * Using ugly macro for implementing the cases, sharing it with movsb.
11634 */
11635 switch (pVCpu->iem.s.enmEffOpSize)
11636 {
11637 case IEMMODE_16BIT:
11638 switch (pVCpu->iem.s.enmEffAddrMode)
11639 {
11640 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
11641 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
11642 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
11643 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11644 }
11645 break;
11646
11647 case IEMMODE_32BIT:
11648 switch (pVCpu->iem.s.enmEffAddrMode)
11649 {
11650 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
11651 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
11652 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
11653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11654 }
11655 break;
11656
11657 case IEMMODE_64BIT:
11658 switch (pVCpu->iem.s.enmEffAddrMode)
11659 {
11660 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11661 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
11662 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
11663 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11664 }
11665 break;
11666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11667 }
11668 return VINF_SUCCESS;
11669}
11670
11671#undef IEM_MOVS_CASE
11672
11673/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
11674#define IEM_CMPS_CASE(ValBits, AddrBits) \
11675 IEM_MC_BEGIN(3, 3); \
11676 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
11677 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
11678 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11679 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
11680 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11681 \
11682 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
11683 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
11684 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11685 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
11686 IEM_MC_REF_LOCAL(puValue1, uValue1); \
11687 IEM_MC_REF_EFLAGS(pEFlags); \
11688 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
11689 \
11690 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11691 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11692 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11693 } IEM_MC_ELSE() { \
11694 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11695 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
11696 } IEM_MC_ENDIF(); \
11697 IEM_MC_ADVANCE_RIP(); \
11698 IEM_MC_END();
11699
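/*
 * Illustrative example (hypothetical state): 'cmpsb' computes the flags of
 * ([rSI] - ES:[rDI]), i.e. uValue1 above is the rSI side, then advances
 * both index registers; 'repe cmpsb' with RCX=n is the classic memcmp-style
 * loop that stops at the first differing byte or after n bytes.
 */
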
11700/** Opcode 0xa6. */
11701FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
11702{
11703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11704
11705 /*
11706 * Use the C implementation if a repeat prefix is encountered.
11707 */
11708 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11709 {
11710 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
11711 switch (pVCpu->iem.s.enmEffAddrMode)
11712 {
11713 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11714 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11715 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11716 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11717 }
11718 }
11719 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11720 {
11721 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
11722 switch (pVCpu->iem.s.enmEffAddrMode)
11723 {
11724 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
11725 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
11726 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
11727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11728 }
11729 }
11730 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
11731
11732 /*
11733 * Sharing case implementation with cmps[wdq] below.
11734 */
11735 switch (pVCpu->iem.s.enmEffAddrMode)
11736 {
11737 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
11738 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
11739 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
11740 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11741 }
11742 return VINF_SUCCESS;
11743
11744}
11745
11746
11747/** Opcode 0xa7. */
11748FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
11749{
11750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11751
11752 /*
11753 * Use the C implementation if a repeat prefix is encountered.
11754 */
11755 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
11756 {
11757 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
11758 switch (pVCpu->iem.s.enmEffOpSize)
11759 {
11760 case IEMMODE_16BIT:
11761 switch (pVCpu->iem.s.enmEffAddrMode)
11762 {
11763 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11764 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11765 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11766 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11767 }
11768 break;
11769 case IEMMODE_32BIT:
11770 switch (pVCpu->iem.s.enmEffAddrMode)
11771 {
11772 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11773 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11774 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11776 }
11777 case IEMMODE_64BIT:
11778 switch (pVCpu->iem.s.enmEffAddrMode)
11779 {
11780 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11781 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11782 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11784 }
11785 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11786 }
11787 }
11788
11789 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
11790 {
11791 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
11792 switch (pVCpu->iem.s.enmEffOpSize)
11793 {
11794 case IEMMODE_16BIT:
11795 switch (pVCpu->iem.s.enmEffAddrMode)
11796 {
11797 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
11798 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
11799 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
11800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11801 }
11802 break;
11803 case IEMMODE_32BIT:
11804 switch (pVCpu->iem.s.enmEffAddrMode)
11805 {
11806 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
11807 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
11808 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
11809 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11810 }
11811 case IEMMODE_64BIT:
11812 switch (pVCpu->iem.s.enmEffAddrMode)
11813 {
11814 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11815 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
11816 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
11817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11818 }
11819 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11820 }
11821 }
11822
11823 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
11824
11825 /*
11826 * Annoying double switch here.
11827 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11828 */
11829 switch (pVCpu->iem.s.enmEffOpSize)
11830 {
11831 case IEMMODE_16BIT:
11832 switch (pVCpu->iem.s.enmEffAddrMode)
11833 {
11834 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11835 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11836 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11837 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11838 }
11839 break;
11840
11841 case IEMMODE_32BIT:
11842 switch (pVCpu->iem.s.enmEffAddrMode)
11843 {
11844 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11845 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11846 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11847 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11848 }
11849 break;
11850
11851 case IEMMODE_64BIT:
11852 switch (pVCpu->iem.s.enmEffAddrMode)
11853 {
11854 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11855 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11856 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11858 }
11859 break;
11860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11861 }
11862 return VINF_SUCCESS;
11863
11864}
11865
11866#undef IEM_CMPS_CASE
11867
11868/** Opcode 0xa8. */
11869FNIEMOP_DEF(iemOp_test_AL_Ib)
11870{
11871 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
11872 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11873 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
11874}
11875
11876
11877/** Opcode 0xa9. */
11878FNIEMOP_DEF(iemOp_test_eAX_Iz)
11879{
11880 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
11881 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11882 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
11883}
11884
11885
11886/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
11887#define IEM_STOS_CASE(ValBits, AddrBits) \
11888 IEM_MC_BEGIN(0, 2); \
11889 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
11890 IEM_MC_LOCAL(RTGCPTR, uAddr); \
11891 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
11892 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
11893 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
11894 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
11895 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11896 } IEM_MC_ELSE() { \
11897 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
11898 } IEM_MC_ENDIF(); \
11899 IEM_MC_ADVANCE_RIP(); \
11900 IEM_MC_END();
11901
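/*
 * Illustrative example (hypothetical state): 'rep stosb' with AL=0 and
 * RCX=n is the classic memset(ES:rDI, 0, n); the non-rep form above stores
 * a single rAX-sized value and steps rDI by the operand size per DF.
 */
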
11902/** Opcode 0xaa. */
11903FNIEMOP_DEF(iemOp_stosb_Yb_AL)
11904{
11905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11906
11907 /*
11908 * Use the C implementation if a repeat prefix is encountered.
11909 */
11910 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11911 {
11912 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
11913 switch (pVCpu->iem.s.enmEffAddrMode)
11914 {
11915 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
11916 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
11917 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
11918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11919 }
11920 }
11921 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
11922
11923 /*
11924 * Sharing case implementation with stos[wdq] below.
11925 */
11926 switch (pVCpu->iem.s.enmEffAddrMode)
11927 {
11928 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
11929 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
11930 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
11931 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11932 }
11933 return VINF_SUCCESS;
11934}
11935
11936
11937/** Opcode 0xab. */
11938FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11939{
11940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11941
11942 /*
11943 * Use the C implementation if a repeat prefix is encountered.
11944 */
11945 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11946 {
11947 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
11948 switch (pVCpu->iem.s.enmEffOpSize)
11949 {
11950 case IEMMODE_16BIT:
11951 switch (pVCpu->iem.s.enmEffAddrMode)
11952 {
11953 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11954 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11955 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11956 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11957 }
11958 break;
11959 case IEMMODE_32BIT:
11960 switch (pVCpu->iem.s.enmEffAddrMode)
11961 {
11962 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11963 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11964 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11966 }
11967 case IEMMODE_64BIT:
11968 switch (pVCpu->iem.s.enmEffAddrMode)
11969 {
11970 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11971 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11972 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11973 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11974 }
11975 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11976 }
11977 }
11978 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
11979
11980 /*
11981 * Annoying double switch here.
11982 * Using ugly macro for implementing the cases, sharing it with stosb.
11983 */
11984 switch (pVCpu->iem.s.enmEffOpSize)
11985 {
11986 case IEMMODE_16BIT:
11987 switch (pVCpu->iem.s.enmEffAddrMode)
11988 {
11989 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11990 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11991 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11992 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11993 }
11994 break;
11995
11996 case IEMMODE_32BIT:
11997 switch (pVCpu->iem.s.enmEffAddrMode)
11998 {
11999 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
12000 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
12001 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
12002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12003 }
12004 break;
12005
12006 case IEMMODE_64BIT:
12007 switch (pVCpu->iem.s.enmEffAddrMode)
12008 {
12009 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12010 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
12011 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
12012 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12013 }
12014 break;
12015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12016 }
12017 return VINF_SUCCESS;
12018}
12019
12020#undef IEM_STOS_CASE
12021
12022/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
12023#define IEM_LODS_CASE(ValBits, AddrBits) \
12024 IEM_MC_BEGIN(0, 2); \
12025 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
12026 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12027 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
12028 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
12029 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
12030 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12031 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12032 } IEM_MC_ELSE() { \
12033 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
12034 } IEM_MC_ENDIF(); \
12035 IEM_MC_ADVANCE_RIP(); \
12036 IEM_MC_END();
12037
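/*
 * Illustrative example (hypothetical state): 'lodsb' loads AL from
 * DS:[rSI] (segment overridable via iEffSeg) and steps rSI by one; a
 * 'rep lods' is legal but only the value from the final iteration survives
 * in rAX.
 */
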
12038/** Opcode 0xac. */
12039FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
12040{
12041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12042
12043 /*
12044 * Use the C implementation if a repeat prefix is encountered.
12045 */
12046 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12047 {
12048 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
12049 switch (pVCpu->iem.s.enmEffAddrMode)
12050 {
12051 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
12052 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
12053 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
12054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12055 }
12056 }
12057 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
12058
12059 /*
12060 * Sharing case implementation with lods[wdq] below.
12061 */
12062 switch (pVCpu->iem.s.enmEffAddrMode)
12063 {
12064 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
12065 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
12066 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
12067 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12068 }
12069 return VINF_SUCCESS;
12070}
12071
12072
12073/** Opcode 0xad. */
12074FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
12075{
12076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12077
12078 /*
12079 * Use the C implementation if a repeat prefix is encountered.
12080 */
12081 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
12082 {
12083 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
12084 switch (pVCpu->iem.s.enmEffOpSize)
12085 {
12086 case IEMMODE_16BIT:
12087 switch (pVCpu->iem.s.enmEffAddrMode)
12088 {
12089 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
12090 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
12091 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
12092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12093 }
12094 break;
12095 case IEMMODE_32BIT:
12096 switch (pVCpu->iem.s.enmEffAddrMode)
12097 {
12098 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
12099 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
12100 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
12101 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12102 }
12103 case IEMMODE_64BIT:
12104 switch (pVCpu->iem.s.enmEffAddrMode)
12105 {
12106 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
12107 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
12108 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
12109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12110 }
12111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12112 }
12113 }
12114 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
12115
12116 /*
12117 * Annoying double switch here.
12118 * Using ugly macro for implementing the cases, sharing it with lodsb.
12119 */
12120 switch (pVCpu->iem.s.enmEffOpSize)
12121 {
12122 case IEMMODE_16BIT:
12123 switch (pVCpu->iem.s.enmEffAddrMode)
12124 {
12125 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
12126 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
12127 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
12128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12129 }
12130 break;
12131
12132 case IEMMODE_32BIT:
12133 switch (pVCpu->iem.s.enmEffAddrMode)
12134 {
12135 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
12136 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
12137 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
12138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12139 }
12140 break;
12141
12142 case IEMMODE_64BIT:
12143 switch (pVCpu->iem.s.enmEffAddrMode)
12144 {
12145 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12146 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
12147 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
12148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12149 }
12150 break;
12151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12152 }
12153 return VINF_SUCCESS;
12154}
12155
12156#undef IEM_LODS_CASE
12157
12158/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
12159#define IEM_SCAS_CASE(ValBits, AddrBits) \
12160 IEM_MC_BEGIN(3, 2); \
12161 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
12162 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
12163 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
12164 IEM_MC_LOCAL(RTGCPTR, uAddr); \
12165 \
12166 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
12167 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
12168 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
12169 IEM_MC_REF_EFLAGS(pEFlags); \
12170 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
12171 \
12172 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
12173 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12174 } IEM_MC_ELSE() { \
12175 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
12176 } IEM_MC_ENDIF(); \
12177 IEM_MC_ADVANCE_RIP(); \
12178 IEM_MC_END();
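/* Illustration (added; not part of the original source): IEM_SCAS_CASE(16, 32)
   expands to a microcode block roughly equivalent to this C sketch, using
   hypothetical flat-memory helpers readMemU16/cmpU16 for brevity:

       uint16_t uValue = readMemU16(X86_SREG_ES, pCtx->edi); // always ES:rDI, no segment override
       cmpU16(&pCtx->ax, uValue, &pCtx->eflags);             // compare only; AX is not written
       if (pCtx->eflags & X86_EFL_DF) pCtx->edi -= 2;        // DF set: scan downwards
       else                           pCtx->edi += 2;        // DF clear: scan upwards
 */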
12179
12180/** Opcode 0xae. */
12181FNIEMOP_DEF(iemOp_scasb_AL_Xb)
12182{
12183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12184
12185 /*
12186 * Use the C implementation if a repeat prefix is encountered.
12187 */
12188 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12189 {
12190 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
12191 switch (pVCpu->iem.s.enmEffAddrMode)
12192 {
12193 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
12194 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
12195 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
12196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12197 }
12198 }
12199 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12200 {
12201 IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
12202 switch (pVCpu->iem.s.enmEffAddrMode)
12203 {
12204 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
12205 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
12206 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
12207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12208 }
12209 }
12210 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
12211
12212 /*
12213 * Sharing case implementation with scas[wdq] below.
12214 */
12215 switch (pVCpu->iem.s.enmEffAddrMode)
12216 {
12217 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
12218 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
12219 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
12220 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12221 }
12222 return VINF_SUCCESS;
12223}
12224
12225
12226/** Opcode 0xaf. */
12227FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
12228{
12229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12230
12231 /*
12232 * Use the C implementation if a repeat prefix is encountered.
12233 */
12234 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
12235 {
12236 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
12237 switch (pVCpu->iem.s.enmEffOpSize)
12238 {
12239 case IEMMODE_16BIT:
12240 switch (pVCpu->iem.s.enmEffAddrMode)
12241 {
12242 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
12243 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
12244 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
12245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12246 }
12247 break;
12248 case IEMMODE_32BIT:
12249 switch (pVCpu->iem.s.enmEffAddrMode)
12250 {
12251 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
12252 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
12253 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
12254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12255 }
 break;
12256 case IEMMODE_64BIT:
12257 switch (pVCpu->iem.s.enmEffAddrMode)
12258 {
12259 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 16-bit addressing cannot be encoded in 64-bit mode; the 67h prefix selects 32-bit, not 16-bit. */
12260 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
12261 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
12262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12263 }
12264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12265 }
12266 }
12267 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
12268 {
12269 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
12270 switch (pVCpu->iem.s.enmEffOpSize)
12271 {
12272 case IEMMODE_16BIT:
12273 switch (pVCpu->iem.s.enmEffAddrMode)
12274 {
12275 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
12276 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
12277 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
12278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12279 }
12280 break;
12281 case IEMMODE_32BIT:
12282 switch (pVCpu->iem.s.enmEffAddrMode)
12283 {
12284 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
12285 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
12286 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
12287 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12288 }
 break;
12289 case IEMMODE_64BIT:
12290 switch (pVCpu->iem.s.enmEffAddrMode)
12291 {
12292 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
12293 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
12294 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
12295 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12296 }
12297 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12298 }
12299 }
12300 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
12301
12302 /*
12303 * Annoying double switch here.
12304 * Using an ugly macro to implement the cases, sharing it with scasb.
12305 */
12306 switch (pVCpu->iem.s.enmEffOpSize)
12307 {
12308 case IEMMODE_16BIT:
12309 switch (pVCpu->iem.s.enmEffAddrMode)
12310 {
12311 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
12312 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
12313 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
12314 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12315 }
12316 break;
12317
12318 case IEMMODE_32BIT:
12319 switch (pVCpu->iem.s.enmEffAddrMode)
12320 {
12321 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
12322 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
12323 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
12324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12325 }
12326 break;
12327
12328 case IEMMODE_64BIT:
12329 switch (pVCpu->iem.s.enmEffAddrMode)
12330 {
12331 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
12332 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
12333 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
12334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12335 }
12336 break;
12337 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12338 }
12339 return VINF_SUCCESS;
12340}
12341
12342#undef IEM_SCAS_CASE
12343
12344/**
12345 * Common 'mov r8, imm8' helper.
12346 */
12347FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
12348{
12349 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12351
12352 IEM_MC_BEGIN(0, 1);
12353 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
12354 IEM_MC_STORE_GREG_U8(iReg, u8Value);
12355 IEM_MC_ADVANCE_RIP();
12356 IEM_MC_END();
12357
12358 return VINF_SUCCESS;
12359}
12360
12361
12362/** Opcode 0xb0. */
12363FNIEMOP_DEF(iemOp_mov_AL_Ib)
12364{
12365 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
12366 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12367}
12368
12369
12370/** Opcode 0xb1. */
12371FNIEMOP_DEF(iemOp_CL_Ib)
12372{
12373 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
12374 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12375}
12376
12377
12378/** Opcode 0xb2. */
12379FNIEMOP_DEF(iemOp_DL_Ib)
12380{
12381 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
12382 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12383}
12384
12385
12386/** Opcode 0xb3. */
12387FNIEMOP_DEF(iemOp_BL_Ib)
12388{
12389 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
12390 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12391}
12392
12393
12394/** Opcode 0xb4. */
12395FNIEMOP_DEF(iemOp_mov_AH_Ib)
12396{
12397 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
12398 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12399}
12400
12401
12402/** Opcode 0xb5. */
12403FNIEMOP_DEF(iemOp_CH_Ib)
12404{
12405 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
12406 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12407}
12408
12409
12410/** Opcode 0xb6. */
12411FNIEMOP_DEF(iemOp_DH_Ib)
12412{
12413 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
12414 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12415}
12416
12417
12418/** Opcode 0xb7. */
12419FNIEMOP_DEF(iemOp_BH_Ib)
12420{
12421 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
12422 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12423}
12424
12425
12426/**
12427 * Common 'mov regX,immX' helper.
12428 */
12429FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
12430{
12431 switch (pVCpu->iem.s.enmEffOpSize)
12432 {
12433 case IEMMODE_16BIT:
12434 {
12435 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12437
12438 IEM_MC_BEGIN(0, 1);
12439 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
12440 IEM_MC_STORE_GREG_U16(iReg, u16Value);
12441 IEM_MC_ADVANCE_RIP();
12442 IEM_MC_END();
12443 break;
12444 }
12445
12446 case IEMMODE_32BIT:
12447 {
12448 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12450
12451 IEM_MC_BEGIN(0, 1);
12452 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
12453 IEM_MC_STORE_GREG_U32(iReg, u32Value);
12454 IEM_MC_ADVANCE_RIP();
12455 IEM_MC_END();
12456 break;
12457 }
12458 case IEMMODE_64BIT:
12459 {
12460 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
12461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12462
12463 IEM_MC_BEGIN(0, 1);
12464 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
12465 IEM_MC_STORE_GREG_U64(iReg, u64Value);
12466 IEM_MC_ADVANCE_RIP();
12467 IEM_MC_END();
12468 break;
12469 }
12470 }
12471
12472 return VINF_SUCCESS;
12473}
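/* Encoding note (added): with REX.W, B8+r is the only MOV form that takes a full
   64-bit immediate, e.g. 48 B8 88 77 66 55 44 33 22 11 = mov rax, 0x1122334455667788
   (the immediate bytes are little-endian). */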
12474
12475
12476/** Opcode 0xb8. */
12477FNIEMOP_DEF(iemOp_eAX_Iv)
12478{
12479 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
12480 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12481}
12482
12483
12484/** Opcode 0xb9. */
12485FNIEMOP_DEF(iemOp_eCX_Iv)
12486{
12487 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
12488 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12489}
12490
12491
12492/** Opcode 0xba. */
12493FNIEMOP_DEF(iemOp_eDX_Iv)
12494{
12495 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
12496 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12497}
12498
12499
12500/** Opcode 0xbb. */
12501FNIEMOP_DEF(iemOp_eBX_Iv)
12502{
12503 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
12504 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12505}
12506
12507
12508/** Opcode 0xbc. */
12509FNIEMOP_DEF(iemOp_eSP_Iv)
12510{
12511 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
12512 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12513}
12514
12515
12516/** Opcode 0xbd. */
12517FNIEMOP_DEF(iemOp_eBP_Iv)
12518{
12519 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
12520 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12521}
12522
12523
12524/** Opcode 0xbe. */
12525FNIEMOP_DEF(iemOp_eSI_Iv)
12526{
12527 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
12528 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12529}
12530
12531
12532/** Opcode 0xbf. */
12533FNIEMOP_DEF(iemOp_eDI_Iv)
12534{
12535 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
12536 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12537}
12538
12539
12540/** Opcode 0xc0. */
12541FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
12542{
12543 IEMOP_HLP_MIN_186();
12544 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12545 PCIEMOPSHIFTSIZES pImpl;
12546 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12547 {
12548 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
12549 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
12550 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
12551 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
12552 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
12553 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
12554 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
12555 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12556 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
12557 }
12558 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12559
12560 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12561 {
12562 /* register */
12563 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12565 IEM_MC_BEGIN(3, 0);
12566 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12567 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12568 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12569 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12570 IEM_MC_REF_EFLAGS(pEFlags);
12571 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12572 IEM_MC_ADVANCE_RIP();
12573 IEM_MC_END();
12574 }
12575 else
12576 {
12577 /* memory */
12578 IEM_MC_BEGIN(3, 2);
12579 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12580 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12581 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12583
12584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12585 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12586 IEM_MC_ASSIGN(cShiftArg, cShift);
12587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12588 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12589 IEM_MC_FETCH_EFLAGS(EFlags);
12590 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12591
12592 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12593 IEM_MC_COMMIT_EFLAGS(EFlags);
12594 IEM_MC_ADVANCE_RIP();
12595 IEM_MC_END();
12596 }
12597 return VINF_SUCCESS;
12598}
12599
12600
12601/** Opcode 0xc1. */
12602FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
12603{
12604 IEMOP_HLP_MIN_186();
12605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12606 PCIEMOPSHIFTSIZES pImpl;
12607 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12608 {
12609 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
12610 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
12611 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
12612 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
12613 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
12614 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
12615 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
12616 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12617 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
12618 }
12619 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12620
12621 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12622 {
12623 /* register */
12624 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12626 switch (pVCpu->iem.s.enmEffOpSize)
12627 {
12628 case IEMMODE_16BIT:
12629 IEM_MC_BEGIN(3, 0);
12630 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12631 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12632 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12633 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12634 IEM_MC_REF_EFLAGS(pEFlags);
12635 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12636 IEM_MC_ADVANCE_RIP();
12637 IEM_MC_END();
12638 return VINF_SUCCESS;
12639
12640 case IEMMODE_32BIT:
12641 IEM_MC_BEGIN(3, 0);
12642 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12643 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12644 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12645 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12646 IEM_MC_REF_EFLAGS(pEFlags);
12647 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12648 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12649 IEM_MC_ADVANCE_RIP();
12650 IEM_MC_END();
12651 return VINF_SUCCESS;
12652
12653 case IEMMODE_64BIT:
12654 IEM_MC_BEGIN(3, 0);
12655 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12656 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
12657 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12658 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
12659 IEM_MC_REF_EFLAGS(pEFlags);
12660 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12661 IEM_MC_ADVANCE_RIP();
12662 IEM_MC_END();
12663 return VINF_SUCCESS;
12664
12665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12666 }
12667 }
12668 else
12669 {
12670 /* memory */
12671 switch (pVCpu->iem.s.enmEffOpSize)
12672 {
12673 case IEMMODE_16BIT:
12674 IEM_MC_BEGIN(3, 2);
12675 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12676 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12677 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12678 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12679
12680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12681 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12682 IEM_MC_ASSIGN(cShiftArg, cShift);
12683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12684 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12685 IEM_MC_FETCH_EFLAGS(EFlags);
12686 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12687
12688 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12689 IEM_MC_COMMIT_EFLAGS(EFlags);
12690 IEM_MC_ADVANCE_RIP();
12691 IEM_MC_END();
12692 return VINF_SUCCESS;
12693
12694 case IEMMODE_32BIT:
12695 IEM_MC_BEGIN(3, 2);
12696 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12697 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12698 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12700
12701 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12702 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12703 IEM_MC_ASSIGN(cShiftArg, cShift);
12704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12705 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12706 IEM_MC_FETCH_EFLAGS(EFlags);
12707 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12708
12709 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12710 IEM_MC_COMMIT_EFLAGS(EFlags);
12711 IEM_MC_ADVANCE_RIP();
12712 IEM_MC_END();
12713 return VINF_SUCCESS;
12714
12715 case IEMMODE_64BIT:
12716 IEM_MC_BEGIN(3, 2);
12717 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12718 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12719 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12720 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12721
12722 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12723 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
12724 IEM_MC_ASSIGN(cShiftArg, cShift);
12725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12726 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12727 IEM_MC_FETCH_EFLAGS(EFlags);
12728 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12729
12730 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12731 IEM_MC_COMMIT_EFLAGS(EFlags);
12732 IEM_MC_ADVANCE_RIP();
12733 IEM_MC_END();
12734 return VINF_SUCCESS;
12735
12736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12737 }
12738 }
12739}
12740
12741
12742/** Opcode 0xc2. */
12743FNIEMOP_DEF(iemOp_retn_Iw)
12744{
12745 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
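 /* Near return with an immediate: pop the return address, then add u16Imm to rSP
    (the callee-cleans-arguments calling conventions). */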
12746 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12747 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12748 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12749 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
12750}
12751
12752
12753/** Opcode 0xc3. */
12754FNIEMOP_DEF(iemOp_retn)
12755{
12756 IEMOP_MNEMONIC(retn, "retn");
12757 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12759 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
12760}
12761
12762
12763/** Opcode 0xc4. */
12764FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
12765{
12766 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12767 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
12768 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12769 {
12770 IEMOP_MNEMONIC(vex3_prefix, "3-byte-vex");
12771 /* The LES instruction is invalid in 64-bit mode. In legacy and
12772 compatibility mode it is invalid with MOD=3.
12773 The use as a 3-byte VEX prefix is made possible by assigning the
12774 inverted REX.R and REX.X to the two MOD bits: outside 64-bit mode
12775 both bits must be set, so the byte always reads as MOD=3 and thus
12776 never as a valid LES ModRM byte. */
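 /* Byte layout reference (added; standard x86 encoding, not from the original source):
        byte 0: 0xc4
        byte 1: ~R(7) ~X(6) ~B(5) mmmmm(4:0)  - inverted REX bits plus opcode map select
        byte 2: W(7) ~vvvv(6:3) L(2) pp(1:0)  - width, register specifier, length, SIMD prefix
    With ~R and ~X forced to 1 outside 64-bit mode, byte 1 always decodes as MOD=3. */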
12777 /** @todo VEX: Just use new tables for it. */
12778 return IEMOP_RAISE_INVALID_OPCODE();
12779 }
12780 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
12781 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
12782}
12783
12784
12785/** Opcode 0xc5. */
12786FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
12787{
12788 /* The LDS instruction is invalid in 64-bit mode. In legacy and
12789 compatibility mode it is invalid with MOD=3.
12790 The use as a 2-byte VEX prefix is made possible by assigning the inverted
12791 REX.R to the top MOD bit, and the top bit in the inverted register
12792 specifier to the bottom MOD bit, thereby limiting this VEX form to registers 0..7 (vvvv) outside 64-bit mode. VEX is not available in real or v86 mode. */
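 /* Byte layout reference (added; standard x86 encoding, not from the original source):
        byte 0: 0xc5
        byte 1: ~R(7) ~vvvv(6:3) L(2) pp(1:0)
    Outside 64-bit mode bits 7:6 must both be set to read as MOD=3, i.e. ~R = 1 and
    the inverted top register-specifier bit = 1, restricting vvvv to 0..7. */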
12793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12794 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
12795 {
12796 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
12797 {
12798 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
12799 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
12800 }
12801 IEMOP_HLP_NO_REAL_OR_V86_MODE();
12802 }
12803
12804 IEMOP_MNEMONIC(vex2_prefix, "2-byte-vex");
12805 /** @todo Test when exactly the VEX conformance checks kick in during
12806 * instruction decoding and fetching (using \#PF). */
12807 uint8_t bVex; IEM_OPCODE_GET_NEXT_U8(&bVex);
12808 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
12810#if 0 /* will make sense of this next week... */
12811 if ( !(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
12812 &&
12813 )
12814 {
12815
12816 }
12817#endif
12818
12819 /** @todo VEX: Just use new tables for it. */
12820 return IEMOP_RAISE_INVALID_OPCODE();
12821}
12822
12823
12824/** Opcode 0xc6. */
12825FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
12826{
12827 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12828 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
12829 return IEMOP_RAISE_INVALID_OPCODE();
12830 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
12831
12832 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12833 {
12834 /* register access */
12835 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12837 IEM_MC_BEGIN(0, 0);
12838 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
12839 IEM_MC_ADVANCE_RIP();
12840 IEM_MC_END();
12841 }
12842 else
12843 {
12844 /* memory access. */
12845 IEM_MC_BEGIN(0, 1);
12846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12848 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12850 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
12851 IEM_MC_ADVANCE_RIP();
12852 IEM_MC_END();
12853 }
12854 return VINF_SUCCESS;
12855}
12856
12857
12858/** Opcode 0xc7. */
12859FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
12860{
12861 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12862 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
12863 return IEMOP_RAISE_INVALID_OPCODE();
12864 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
12865
12866 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12867 {
12868 /* register access */
12869 switch (pVCpu->iem.s.enmEffOpSize)
12870 {
12871 case IEMMODE_16BIT:
12872 IEM_MC_BEGIN(0, 0);
12873 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12875 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
12876 IEM_MC_ADVANCE_RIP();
12877 IEM_MC_END();
12878 return VINF_SUCCESS;
12879
12880 case IEMMODE_32BIT:
12881 IEM_MC_BEGIN(0, 0);
12882 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12883 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12884 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
12885 IEM_MC_ADVANCE_RIP();
12886 IEM_MC_END();
12887 return VINF_SUCCESS;
12888
12889 case IEMMODE_64BIT:
12890 IEM_MC_BEGIN(0, 0);
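 /* Note (added): Iz here is a 32-bit immediate sign-extended to 64 bits;
    unlike B8+r above, mov Ev,Iz has no 64-bit immediate form. */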
12891 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12893 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
12894 IEM_MC_ADVANCE_RIP();
12895 IEM_MC_END();
12896 return VINF_SUCCESS;
12897
12898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12899 }
12900 }
12901 else
12902 {
12903 /* memory access. */
12904 switch (pVCpu->iem.s.enmEffOpSize)
12905 {
12906 case IEMMODE_16BIT:
12907 IEM_MC_BEGIN(0, 1);
12908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
12910 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12912 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
12913 IEM_MC_ADVANCE_RIP();
12914 IEM_MC_END();
12915 return VINF_SUCCESS;
12916
12917 case IEMMODE_32BIT:
12918 IEM_MC_BEGIN(0, 1);
12919 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12921 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12923 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
12924 IEM_MC_ADVANCE_RIP();
12925 IEM_MC_END();
12926 return VINF_SUCCESS;
12927
12928 case IEMMODE_64BIT:
12929 IEM_MC_BEGIN(0, 1);
12930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12932 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12934 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
12935 IEM_MC_ADVANCE_RIP();
12936 IEM_MC_END();
12937 return VINF_SUCCESS;
12938
12939 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12940 }
12941 }
12942}
12943
12944
12945
12946
12947/** Opcode 0xc8. */
12948FNIEMOP_DEF(iemOp_enter_Iw_Ib)
12949{
12950 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
12951 IEMOP_HLP_MIN_186();
12952 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12953 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
12954 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
12955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
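 /* ENTER pushes rBP, sets rBP = rSP and reserves cbFrame bytes of stack; a
    non-zero nesting level (taken modulo 32 by the ISA) additionally copies
    outer frame pointers into the new frame. */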
12956 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
12957}
12958
12959
12960/** Opcode 0xc9. */
12961FNIEMOP_DEF(iemOp_leave)
12962{
12963 IEMOP_MNEMONIC(leave, "leave");
12964 IEMOP_HLP_MIN_186();
12965 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
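 /* LEAVE undoes ENTER's frame setup: rSP = rBP, then pop rBP. */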
12967 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
12968}
12969
12970
12971/** Opcode 0xca. */
12972FNIEMOP_DEF(iemOp_retf_Iw)
12973{
12974 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
12975 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12977 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12978 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
12979}
12980
12981
12982/** Opcode 0xcb. */
12983FNIEMOP_DEF(iemOp_retf)
12984{
12985 IEMOP_MNEMONIC(retf, "retf");
12986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12987 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12988 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
12989}
12990
12991
12992/** Opcode 0xcc. */
12993FNIEMOP_DEF(iemOp_int_3)
12994{
 IEMOP_MNEMONIC(int3, "int3");
12995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
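 /* fIsBpInstr lets the C implementation distinguish the one-byte CC breakpoint
    from 'int 3' (CD 03); INT3 is for instance not subject to the IOPL check
    that INT n gets in v8086 mode. */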
12996 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
12997}
12998
12999
13000/** Opcode 0xcd. */
13001FNIEMOP_DEF(iemOp_int_Ib)
13002{
 IEMOP_MNEMONIC(int_Ib, "int Ib");
13003 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
13004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13005 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
13006}
13007
13008
13009/** Opcode 0xce. */
13010FNIEMOP_DEF(iemOp_into)
13011{
13012 IEMOP_MNEMONIC(into, "into");
13013 IEMOP_HLP_NO_64BIT();
13014
13015 IEM_MC_BEGIN(2, 0);
13016 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
13017 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
13018 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
13019 IEM_MC_END();
13020 return VINF_SUCCESS;
13021}
13022
13023
13024/** Opcode 0xcf. */
13025FNIEMOP_DEF(iemOp_iret)
13026{
13027 IEMOP_MNEMONIC(iret, "iret");
13028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13029 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
13030}
13031
13032
13033/** Opcode 0xd0. */
13034FNIEMOP_DEF(iemOp_Grp2_Eb_1)
13035{
13036 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13037 PCIEMOPSHIFTSIZES pImpl;
13038 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13039 {
13040 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
13041 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
13042 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
13043 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
13044 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
13045 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
13046 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
13047 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13048 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13049 }
13050 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13051
13052 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13053 {
13054 /* register */
13055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13056 IEM_MC_BEGIN(3, 0);
13057 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13058 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13059 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13060 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13061 IEM_MC_REF_EFLAGS(pEFlags);
13062 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13063 IEM_MC_ADVANCE_RIP();
13064 IEM_MC_END();
13065 }
13066 else
13067 {
13068 /* memory */
13069 IEM_MC_BEGIN(3, 2);
13070 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13071 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
13072 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13074
13075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13077 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13078 IEM_MC_FETCH_EFLAGS(EFlags);
13079 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13080
13081 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13082 IEM_MC_COMMIT_EFLAGS(EFlags);
13083 IEM_MC_ADVANCE_RIP();
13084 IEM_MC_END();
13085 }
13086 return VINF_SUCCESS;
13087}
13088
13089
13090
13091/** Opcode 0xd1. */
13092FNIEMOP_DEF(iemOp_Grp2_Ev_1)
13093{
13094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13095 PCIEMOPSHIFTSIZES pImpl;
13096 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13097 {
13098 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
13099 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
13100 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
13101 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
13102 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
13103 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
13104 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
13105 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13106 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
13107 }
13108 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13109
13110 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13111 {
13112 /* register */
13113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13114 switch (pVCpu->iem.s.enmEffOpSize)
13115 {
13116 case IEMMODE_16BIT:
13117 IEM_MC_BEGIN(3, 0);
13118 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13119 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13120 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13121 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13122 IEM_MC_REF_EFLAGS(pEFlags);
13123 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13124 IEM_MC_ADVANCE_RIP();
13125 IEM_MC_END();
13126 return VINF_SUCCESS;
13127
13128 case IEMMODE_32BIT:
13129 IEM_MC_BEGIN(3, 0);
13130 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13131 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13133 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13134 IEM_MC_REF_EFLAGS(pEFlags);
13135 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13136 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13137 IEM_MC_ADVANCE_RIP();
13138 IEM_MC_END();
13139 return VINF_SUCCESS;
13140
13141 case IEMMODE_64BIT:
13142 IEM_MC_BEGIN(3, 0);
13143 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13144 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13145 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13146 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13147 IEM_MC_REF_EFLAGS(pEFlags);
13148 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13149 IEM_MC_ADVANCE_RIP();
13150 IEM_MC_END();
13151 return VINF_SUCCESS;
13152
13153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13154 }
13155 }
13156 else
13157 {
13158 /* memory */
13159 switch (pVCpu->iem.s.enmEffOpSize)
13160 {
13161 case IEMMODE_16BIT:
13162 IEM_MC_BEGIN(3, 2);
13163 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13164 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13165 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13167
13168 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13170 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13171 IEM_MC_FETCH_EFLAGS(EFlags);
13172 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13173
13174 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13175 IEM_MC_COMMIT_EFLAGS(EFlags);
13176 IEM_MC_ADVANCE_RIP();
13177 IEM_MC_END();
13178 return VINF_SUCCESS;
13179
13180 case IEMMODE_32BIT:
13181 IEM_MC_BEGIN(3, 2);
13182 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13183 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13184 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13186
13187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13189 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13190 IEM_MC_FETCH_EFLAGS(EFlags);
13191 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13192
13193 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13194 IEM_MC_COMMIT_EFLAGS(EFlags);
13195 IEM_MC_ADVANCE_RIP();
13196 IEM_MC_END();
13197 return VINF_SUCCESS;
13198
13199 case IEMMODE_64BIT:
13200 IEM_MC_BEGIN(3, 2);
13201 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13202 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
13203 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13205
13206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13208 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13209 IEM_MC_FETCH_EFLAGS(EFlags);
13210 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13211
13212 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13213 IEM_MC_COMMIT_EFLAGS(EFlags);
13214 IEM_MC_ADVANCE_RIP();
13215 IEM_MC_END();
13216 return VINF_SUCCESS;
13217
13218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13219 }
13220 }
13221}
13222
13223
13224/** Opcode 0xd2. */
13225FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
13226{
13227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13228 PCIEMOPSHIFTSIZES pImpl;
13229 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13230 {
13231 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
13232 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
13233 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
13234 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
13235 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
13236 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
13237 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
13238 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13239 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
13240 }
13241 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13242
13243 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13244 {
13245 /* register */
13246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13247 IEM_MC_BEGIN(3, 0);
13248 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13249 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13250 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13251 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13252 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13253 IEM_MC_REF_EFLAGS(pEFlags);
13254 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13255 IEM_MC_ADVANCE_RIP();
13256 IEM_MC_END();
13257 }
13258 else
13259 {
13260 /* memory */
13261 IEM_MC_BEGIN(3, 2);
13262 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13263 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13264 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13265 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13266
13267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13269 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13270 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13271 IEM_MC_FETCH_EFLAGS(EFlags);
13272 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
13273
13274 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
13275 IEM_MC_COMMIT_EFLAGS(EFlags);
13276 IEM_MC_ADVANCE_RIP();
13277 IEM_MC_END();
13278 }
13279 return VINF_SUCCESS;
13280}
13281
13282
13283/** Opcode 0xd3. */
13284FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
13285{
13286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13287 PCIEMOPSHIFTSIZES pImpl;
13288 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13289 {
13290 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
13291 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
13292 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
13293 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
13294 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
13295 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
13296 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
13297 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13298 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc may be stupid */
13299 }
13300 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
13301
13302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13303 {
13304 /* register */
13305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13306 switch (pVCpu->iem.s.enmEffOpSize)
13307 {
13308 case IEMMODE_16BIT:
13309 IEM_MC_BEGIN(3, 0);
13310 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13311 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13312 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13313 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13314 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13315 IEM_MC_REF_EFLAGS(pEFlags);
13316 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13317 IEM_MC_ADVANCE_RIP();
13318 IEM_MC_END();
13319 return VINF_SUCCESS;
13320
13321 case IEMMODE_32BIT:
13322 IEM_MC_BEGIN(3, 0);
13323 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13324 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13325 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13326 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13327 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13328 IEM_MC_REF_EFLAGS(pEFlags);
13329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13330 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
13331 IEM_MC_ADVANCE_RIP();
13332 IEM_MC_END();
13333 return VINF_SUCCESS;
13334
13335 case IEMMODE_64BIT:
13336 IEM_MC_BEGIN(3, 0);
13337 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13338 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13339 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13340 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
13341 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13342 IEM_MC_REF_EFLAGS(pEFlags);
13343 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13344 IEM_MC_ADVANCE_RIP();
13345 IEM_MC_END();
13346 return VINF_SUCCESS;
13347
13348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13349 }
13350 }
13351 else
13352 {
13353 /* memory */
13354 switch (pVCpu->iem.s.enmEffOpSize)
13355 {
13356 case IEMMODE_16BIT:
13357 IEM_MC_BEGIN(3, 2);
13358 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
13359 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13360 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13362
13363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13365 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13366 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13367 IEM_MC_FETCH_EFLAGS(EFlags);
13368 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
13369
13370 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
13371 IEM_MC_COMMIT_EFLAGS(EFlags);
13372 IEM_MC_ADVANCE_RIP();
13373 IEM_MC_END();
13374 return VINF_SUCCESS;
13375
13376 case IEMMODE_32BIT:
13377 IEM_MC_BEGIN(3, 2);
13378 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
13379 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13380 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13382
13383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13385 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13386 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13387 IEM_MC_FETCH_EFLAGS(EFlags);
13388 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
13389
13390 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
13391 IEM_MC_COMMIT_EFLAGS(EFlags);
13392 IEM_MC_ADVANCE_RIP();
13393 IEM_MC_END();
13394 return VINF_SUCCESS;
13395
13396 case IEMMODE_64BIT:
13397 IEM_MC_BEGIN(3, 2);
13398 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
13399 IEM_MC_ARG(uint8_t, cShiftArg, 1);
13400 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
13401 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13402
13403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13405 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
13406 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
13407 IEM_MC_FETCH_EFLAGS(EFlags);
13408 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
13409
13410 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
13411 IEM_MC_COMMIT_EFLAGS(EFlags);
13412 IEM_MC_ADVANCE_RIP();
13413 IEM_MC_END();
13414 return VINF_SUCCESS;
13415
13416 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13417 }
13418 }
13419}
13420
13421/** Opcode 0xd4. */
13422FNIEMOP_DEF(iemOp_aam_Ib)
13423{
13424 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
13425 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13427 IEMOP_HLP_NO_64BIT();
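 /* AAM: AH = AL / bImm, AL = AL % bImm (the base is 10 when assembled without an
    operand, but any value is accepted); a zero divisor must raise #DE, hence the
    check below. */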
13428 if (!bImm)
13429 return IEMOP_RAISE_DIVIDE_ERROR();
13430 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
13431}
13432
13433
13434/** Opcode 0xd5. */
13435FNIEMOP_DEF(iemOp_aad_Ib)
13436{
13437 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
13438 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
13439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13440 IEMOP_HLP_NO_64BIT();
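 /* AAD: AL = (AL + AH * bImm) & 0xff, AH = 0; no division takes place, so no
    #DE check is needed. */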
13441 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
13442}
13443
13444
13445/** Opcode 0xd6. */
13446FNIEMOP_DEF(iemOp_salc)
13447{
13448 IEMOP_MNEMONIC(salc, "salc");
13449 IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
13451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13452 IEMOP_HLP_NO_64BIT();
13453
13454 IEM_MC_BEGIN(0, 0);
13455 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
13456 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
13457 } IEM_MC_ELSE() {
13458 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
13459 } IEM_MC_ENDIF();
13460 IEM_MC_ADVANCE_RIP();
13461 IEM_MC_END();
13462 return VINF_SUCCESS;
13463}
13464
13465
13466/** Opcode 0xd7. */
13467FNIEMOP_DEF(iemOp_xlat)
13468{
13469 IEMOP_MNEMONIC(xlat, "xlat");
13470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
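 /* XLAT loads AL from [iEffSeg : rBX + zero-extended AL]; only the width of the
    rBX base differs between the address modes below. */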
13471 switch (pVCpu->iem.s.enmEffAddrMode)
13472 {
13473 case IEMMODE_16BIT:
13474 IEM_MC_BEGIN(2, 0);
13475 IEM_MC_LOCAL(uint8_t, u8Tmp);
13476 IEM_MC_LOCAL(uint16_t, u16Addr);
13477 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
13478 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
13479 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
13480 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13481 IEM_MC_ADVANCE_RIP();
13482 IEM_MC_END();
13483 return VINF_SUCCESS;
13484
13485 case IEMMODE_32BIT:
13486 IEM_MC_BEGIN(2, 0);
13487 IEM_MC_LOCAL(uint8_t, u8Tmp);
13488 IEM_MC_LOCAL(uint32_t, u32Addr);
13489 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
13490 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
13491 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
13492 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13493 IEM_MC_ADVANCE_RIP();
13494 IEM_MC_END();
13495 return VINF_SUCCESS;
13496
13497 case IEMMODE_64BIT:
13498 IEM_MC_BEGIN(2, 0);
13499 IEM_MC_LOCAL(uint8_t, u8Tmp);
13500 IEM_MC_LOCAL(uint64_t, u64Addr);
13501 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
13502 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
13503 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
13504 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
13505 IEM_MC_ADVANCE_RIP();
13506 IEM_MC_END();
13507 return VINF_SUCCESS;
13508
13509 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13510 }
13511}
13512
13513
13514/**
13515 * Common worker for FPU instructions working on ST0 and STn, and storing the
13516 * result in ST0.
13517 *
 * @param bRm The ModRM byte (stN encoded in the R/M field).
13518 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13519 */
13520FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13521{
13522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13523
13524 IEM_MC_BEGIN(3, 1);
13525 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13526 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13527 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13528 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13529
13530 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13531 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13532 IEM_MC_PREPARE_FPU_USAGE();
13533 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13534 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13535 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13536 IEM_MC_ELSE()
13537 IEM_MC_FPU_STACK_UNDERFLOW(0);
13538 IEM_MC_ENDIF();
13539 IEM_MC_ADVANCE_RIP();
13540
13541 IEM_MC_END();
13542 return VINF_SUCCESS;
13543}
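/* Usage sketch (added): for 'fadd st0,st3' (encoded D8 C3), bRm = 0xc3, so the
   worker references ST0 and ST(3), calls iemAImpl_fadd_r80_by_r80 and stores the
   80-bit result back to ST0; if either register is empty it flags stack underflow
   instead. */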
13544
13545
13546/**
13547 * Common worker for FPU instructions working on ST0 and STn, and only affecting
13548 * flags.
13549 *
 * @param bRm The ModRM byte (stN encoded in the R/M field).
13550 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13551 */
13552FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13553{
13554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13555
13556 IEM_MC_BEGIN(3, 1);
13557 IEM_MC_LOCAL(uint16_t, u16Fsw);
13558 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13559 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13560 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13561
13562 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13563 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13564 IEM_MC_PREPARE_FPU_USAGE();
13565 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13566 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13567 IEM_MC_UPDATE_FSW(u16Fsw);
13568 IEM_MC_ELSE()
13569 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13570 IEM_MC_ENDIF();
13571 IEM_MC_ADVANCE_RIP();
13572
13573 IEM_MC_END();
13574 return VINF_SUCCESS;
13575}
13576
13577
13578/**
13579 * Common worker for FPU instructions working on ST0 and STn, only affecting
13580 * flags, and popping when done.
13581 *
 * @param bRm The ModRM byte (stN encoded in the R/M field).
13582 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13583 */
13584FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13585{
13586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13587
13588 IEM_MC_BEGIN(3, 1);
13589 IEM_MC_LOCAL(uint16_t, u16Fsw);
13590 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13591 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13592 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13593
13594 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13595 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13596 IEM_MC_PREPARE_FPU_USAGE();
13597 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
13598 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13599 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
13600 IEM_MC_ELSE()
13601 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
13602 IEM_MC_ENDIF();
13603 IEM_MC_ADVANCE_RIP();
13604
13605 IEM_MC_END();
13606 return VINF_SUCCESS;
13607}
13608
13609
13610/** Opcode 0xd8 11/0. */
13611FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
13612{
13613 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
13614 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
13615}
13616
13617
13618/** Opcode 0xd8 11/1. */
13619FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
13620{
13621 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
13622 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
13623}
13624
13625
13626/** Opcode 0xd8 11/2. */
13627FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
13628{
13629 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
13630 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
13631}
13632
13633
13634/** Opcode 0xd8 11/3. */
13635FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
13636{
13637 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
13638 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
13639}
13640
13641
13642/** Opcode 0xd8 11/4. */
13643FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
13644{
13645 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
13646 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
13647}
13648
13649
13650/** Opcode 0xd8 11/5. */
13651FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
13652{
13653 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
13654 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
13655}
13656
13657
13658/** Opcode 0xd8 11/6. */
13659FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
13660{
13661 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
13662 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
13663}
13664
13665
13666/** Opcode 0xd8 11/7. */
13667FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
13668{
13669 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
13670 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
13671}
13672
13673
13674/**
13675 * Common worker for FPU instructions working on ST0 and an m32r, and storing
13676 * the result in ST0.
13677 *
 * @param bRm The ModRM byte (memory operand).
13678 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13679 */
13680FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
13681{
13682 IEM_MC_BEGIN(3, 3);
13683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13684 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13685 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13686 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13687 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13688 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13689
13690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13692
13693 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13694 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13695 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13696
13697 IEM_MC_PREPARE_FPU_USAGE();
13698 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13699 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
13700 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13701 IEM_MC_ELSE()
13702 IEM_MC_FPU_STACK_UNDERFLOW(0);
13703 IEM_MC_ENDIF();
13704 IEM_MC_ADVANCE_RIP();
13705
13706 IEM_MC_END();
13707 return VINF_SUCCESS;
13708}
13709
13710
13711/** Opcode 0xd8 !11/0. */
13712FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
13713{
13714 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
13715 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
13716}
13717
13718
13719/** Opcode 0xd8 !11/1. */
13720FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
13721{
13722 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
13723 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
13724}
13725
13726
13727/** Opcode 0xd8 !11/2. */
13728FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
13729{
13730 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
13731
13732 IEM_MC_BEGIN(3, 3);
13733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13734 IEM_MC_LOCAL(uint16_t, u16Fsw);
13735 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13736 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13737 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13738 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13739
13740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13742
13743 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13744 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13745 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13746
13747 IEM_MC_PREPARE_FPU_USAGE();
13748 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13749 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13750 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13751 IEM_MC_ELSE()
13752 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13753 IEM_MC_ENDIF();
13754 IEM_MC_ADVANCE_RIP();
13755
13756 IEM_MC_END();
13757 return VINF_SUCCESS;
13758}
13759
13760
13761/** Opcode 0xd8 !11/3. */
13762FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
13763{
13764 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
13765
13766 IEM_MC_BEGIN(3, 3);
13767 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13768 IEM_MC_LOCAL(uint16_t, u16Fsw);
13769 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
13770 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13771 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13772 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
13773
13774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13776
13777 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13778 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13779 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13780
13781 IEM_MC_PREPARE_FPU_USAGE();
13782 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13783 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
13784 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13785 IEM_MC_ELSE()
13786 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13787 IEM_MC_ENDIF();
13788 IEM_MC_ADVANCE_RIP();
13789
13790 IEM_MC_END();
13791 return VINF_SUCCESS;
13792}
13793
13794
13795/** Opcode 0xd8 !11/4. */
13796FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
13797{
13798 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
13799 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
13800}
13801
13802
13803/** Opcode 0xd8 !11/5. */
13804FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
13805{
13806 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
13807 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
13808}
13809
13810
13811/** Opcode 0xd8 !11/6. */
13812FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
13813{
13814 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
13815 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
13816}
13817
13818
13819/** Opcode 0xd8 !11/7. */
13820FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
13821{
13822 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
13823 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
13824}
13825
13826
13827/** Opcode 0xd8. */
13828FNIEMOP_DEF(iemOp_EscF0)
13829{
13830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
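    /* Stash the FPU opcode (FOP) value: the low three bits of the escape byte go in the high byte, the ModRM byte in the low byte. */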
13831 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
13832
13833 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13834 {
13835 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13836 {
13837 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
13838 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
13839 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
13840 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
13841 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
13842 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
13843 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
13844 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
13845 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13846 }
13847 }
13848 else
13849 {
13850 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13851 {
13852 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
13853 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
13854 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
13855 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
13856 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
13857 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
13858 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
13859 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
13860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13861 }
13862 }
13863}
13864
13865
13866/** Opcode 0xd9 !11/0 mem32real
13867 * @sa iemOp_fld_m64r */
13868FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
13869{
13870 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
13871
13872 IEM_MC_BEGIN(2, 3);
13873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13874 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13875 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
13876 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13877 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
13878
13879 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13881
13882 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13883 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13884 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13885
13886 IEM_MC_PREPARE_FPU_USAGE();
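    /* A push decrements TOP first, so the loaded value lands in what is currently ST(7); if that register is in use, the push overflows. */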
13887 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13888 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
13889 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13890 IEM_MC_ELSE()
13891 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13892 IEM_MC_ENDIF();
13893 IEM_MC_ADVANCE_RIP();
13894
13895 IEM_MC_END();
13896 return VINF_SUCCESS;
13897}
13898
13899
13900/** Opcode 0xd9 !11/2 mem32real */
13901FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
13902{
13903 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
13904 IEM_MC_BEGIN(3, 2);
13905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13906 IEM_MC_LOCAL(uint16_t, u16Fsw);
13907 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13908 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13909 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13910
13911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13913 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13914 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13915
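    /* Map the destination up front; both the normal store and the masked underflow path below write through this one mapping. */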
13916 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13917 IEM_MC_PREPARE_FPU_USAGE();
13918 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13919 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13920 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13921 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13922 IEM_MC_ELSE()
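    /* Stack underflow: if the invalid-operation exception is masked (FCW.IM), store the QNaN "real indefinite"; otherwise leave memory untouched. */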
13923 IEM_MC_IF_FCW_IM()
13924 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13925 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13926 IEM_MC_ENDIF();
13927 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13928 IEM_MC_ENDIF();
13929 IEM_MC_ADVANCE_RIP();
13930
13931 IEM_MC_END();
13932 return VINF_SUCCESS;
13933}
13934
13935
13936/** Opcode 0xd9 !11/3 */
13937FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
13938{
13939 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
13940 IEM_MC_BEGIN(3, 2);
13941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13942 IEM_MC_LOCAL(uint16_t, u16Fsw);
13943 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13944 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
13945 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
13946
13947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13949 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13950 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13951
13952 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
13953 IEM_MC_PREPARE_FPU_USAGE();
13954 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13955 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
13956 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
13957 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13958 IEM_MC_ELSE()
13959 IEM_MC_IF_FCW_IM()
13960 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
13961 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
13962 IEM_MC_ENDIF();
13963 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13964 IEM_MC_ENDIF();
13965 IEM_MC_ADVANCE_RIP();
13966
13967 IEM_MC_END();
13968 return VINF_SUCCESS;
13969}
13970
13971
13972/** Opcode 0xd9 !11/4 */
13973FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
13974{
13975 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
13976 IEM_MC_BEGIN(3, 0);
13977 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
13978 IEM_MC_ARG(uint8_t, iEffSeg, 1);
13979 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
13980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13982 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13983 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13984 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
13985 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
13986 IEM_MC_END();
13987 return VINF_SUCCESS;
13988}
13989
13990
13991/** Opcode 0xd9 !11/5 */
13992FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13993{
13994 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
13995 IEM_MC_BEGIN(1, 1);
13996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13997 IEM_MC_ARG(uint16_t, u16Fcw, 0);
13998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14000 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14001 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14002 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14003 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
14004 IEM_MC_END();
14005 return VINF_SUCCESS;
14006}
14007
14008
14009/** Opcode 0xd9 !11/6 */
14010FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
14011{
14012 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
14013 IEM_MC_BEGIN(3, 0);
14014 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
14015 IEM_MC_ARG(uint8_t, iEffSeg, 1);
14016 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
14017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14019 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14020 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14021 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
14022 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
14023 IEM_MC_END();
14024 return VINF_SUCCESS;
14025}
14026
14027
14028/** Opcode 0xd9 !11/7 */
14029FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
14030{
14031 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
14032 IEM_MC_BEGIN(2, 0);
14033 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14034 IEM_MC_LOCAL(uint16_t, u16Fcw);
14035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14037 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14038 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
14039 IEM_MC_FETCH_FCW(u16Fcw);
14040 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
14041 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
14042 IEM_MC_END();
14043 return VINF_SUCCESS;
14044}
14045
14046
14047/** Opcode 0xd9 0xd0, ++?. */
14048FNIEMOP_DEF(iemOp_fnop)
14049{
14050 IEMOP_MNEMONIC(fnop, "fnop");
14051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14052
14053 IEM_MC_BEGIN(0, 0);
14054 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14055 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14056 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14057 /** @todo Testcase: Looks like FNOP leaves FOP alone but updates FPUIP. Could
14058 * be an Intel optimization. Investigate. */
14059 IEM_MC_UPDATE_FPU_OPCODE_IP();
14060 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
14061 IEM_MC_END();
14062 return VINF_SUCCESS;
14063}
14064
14065
14066/** Opcode 0xd9 11/0 stN */
14067FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
14068{
14069 IEMOP_MNEMONIC(fld_stN, "fld stN");
14070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14071
14072 /** @todo Testcase: Check whether this raises \#MF. Intel documentation
14073 * suggests it doesn't; AMD's indicates that it does. */
14074 IEM_MC_BEGIN(0, 2);
14075 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14076 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14077 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14078 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14079
14080 IEM_MC_PREPARE_FPU_USAGE();
14081 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
14082 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14083 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14084 IEM_MC_ELSE()
14085 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
14086 IEM_MC_ENDIF();
14087
14088 IEM_MC_ADVANCE_RIP();
14089 IEM_MC_END();
14090
14091 return VINF_SUCCESS;
14092}
14093
14094
14095/** Opcode 0xd9 11/1 stN */
14096FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
14097{
14098 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
14099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14100
14101 /** @todo Testcase: Check whether this raises \#MF. Intel documentation
14102 * suggests it doesn't; AMD's indicates that it does. */
14103 IEM_MC_BEGIN(1, 3);
14104 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
14105 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
14106 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14107 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
14108 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14109 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14110
14111 IEM_MC_PREPARE_FPU_USAGE();
14112 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
14113 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
14114 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
14115 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14116 IEM_MC_ELSE()
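    /* The underflow case is punted to a C worker, which presumably implements the masked/unmasked #IA handling for FXCH with an empty register. */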
14117 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
14118 IEM_MC_ENDIF();
14119
14120 IEM_MC_ADVANCE_RIP();
14121 IEM_MC_END();
14122
14123 return VINF_SUCCESS;
14124}
14125
14126
14127/** Opcode 0xd9 11/3, 0xdd 11/3. */
14128FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
14129{
14130 IEMOP_MNEMONIC(fstp_stN_st0, "fstp stN,st0");
14131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14132
14133 /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
14134 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
14135 if (!iDstReg)
14136 {
14137 IEM_MC_BEGIN(0, 1);
14138 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
14139 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14140 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14141
14142 IEM_MC_PREPARE_FPU_USAGE();
14143 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
14144 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
14145 IEM_MC_ELSE()
14146 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
14147 IEM_MC_ENDIF();
14148
14149 IEM_MC_ADVANCE_RIP();
14150 IEM_MC_END();
14151 }
14152 else
14153 {
14154 IEM_MC_BEGIN(0, 2);
14155 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
14156 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14157 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14158 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14159
14160 IEM_MC_PREPARE_FPU_USAGE();
14161 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14162 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
14163 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
14164 IEM_MC_ELSE()
14165 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
14166 IEM_MC_ENDIF();
14167
14168 IEM_MC_ADVANCE_RIP();
14169 IEM_MC_END();
14170 }
14171 return VINF_SUCCESS;
14172}
14173
14174
14175/**
14176 * Common worker for FPU instructions working on ST0, replacing it with the
14177 * result, i.e. unary operators.
14178 *
14179 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14180 */
14181FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
14182{
14183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14184
14185 IEM_MC_BEGIN(2, 1);
14186 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14187 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14188 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14189
14190 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14191 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14192 IEM_MC_PREPARE_FPU_USAGE();
14193 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14194 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
14195 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14196 IEM_MC_ELSE()
14197 IEM_MC_FPU_STACK_UNDERFLOW(0);
14198 IEM_MC_ENDIF();
14199 IEM_MC_ADVANCE_RIP();
14200
14201 IEM_MC_END();
14202 return VINF_SUCCESS;
14203}
14204
14205
14206/** Opcode 0xd9 0xe0. */
14207FNIEMOP_DEF(iemOp_fchs)
14208{
14209 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
14210 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
14211}
14212
14213
14214/** Opcode 0xd9 0xe1. */
14215FNIEMOP_DEF(iemOp_fabs)
14216{
14217 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
14218 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
14219}
14220
14221
14222/**
14223 * Common worker for FPU instructions working on ST0 and only returns FSW.
14224 *
14225 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14226 */
14227FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
14228{
14229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14230
14231 IEM_MC_BEGIN(2, 1);
14232 IEM_MC_LOCAL(uint16_t, u16Fsw);
14233 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14234 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14235
14236 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14237 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14238 IEM_MC_PREPARE_FPU_USAGE();
14239 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14240 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
14241 IEM_MC_UPDATE_FSW(u16Fsw);
14242 IEM_MC_ELSE()
14243 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
14244 IEM_MC_ENDIF();
14245 IEM_MC_ADVANCE_RIP();
14246
14247 IEM_MC_END();
14248 return VINF_SUCCESS;
14249}
14250
14251
14252/** Opcode 0xd9 0xe4. */
14253FNIEMOP_DEF(iemOp_ftst)
14254{
14255 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
14256 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
14257}
14258
14259
14260/** Opcode 0xd9 0xe5. */
14261FNIEMOP_DEF(iemOp_fxam)
14262{
14263 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
14264 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
14265}
14266
14267
14268/**
14269 * Common worker for FPU instructions pushing a constant onto the FPU stack.
14270 *
14271 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14272 */
14273FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
14274{
14275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14276
14277 IEM_MC_BEGIN(1, 1);
14278 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14279 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14280
14281 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14282 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14283 IEM_MC_PREPARE_FPU_USAGE();
14284 IEM_MC_IF_FPUREG_IS_EMPTY(7)
14285 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
14286 IEM_MC_PUSH_FPU_RESULT(FpuRes);
14287 IEM_MC_ELSE()
14288 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
14289 IEM_MC_ENDIF();
14290 IEM_MC_ADVANCE_RIP();
14291
14292 IEM_MC_END();
14293 return VINF_SUCCESS;
14294}
14295
14296
14297/** Opcode 0xd9 0xe8. */
14298FNIEMOP_DEF(iemOp_fld1)
14299{
14300 IEMOP_MNEMONIC(fld1, "fld1");
14301 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
14302}
14303
14304
14305/** Opcode 0xd9 0xe9. */
14306FNIEMOP_DEF(iemOp_fldl2t)
14307{
14308 IEMOP_MNEMONIC(fldl2t, "fldl2t");
14309 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
14310}
14311
14312
14313/** Opcode 0xd9 0xea. */
14314FNIEMOP_DEF(iemOp_fldl2e)
14315{
14316 IEMOP_MNEMONIC(fldl2e, "fldl2e");
14317 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
14318}
14319
14320/** Opcode 0xd9 0xeb. */
14321FNIEMOP_DEF(iemOp_fldpi)
14322{
14323 IEMOP_MNEMONIC(fldpi, "fldpi");
14324 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
14325}
14326
14327
14328/** Opcode 0xd9 0xec. */
14329FNIEMOP_DEF(iemOp_fldlg2)
14330{
14331 IEMOP_MNEMONIC(fldlg2, "fldlg2");
14332 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
14333}
14334
14335/** Opcode 0xd9 0xed. */
14336FNIEMOP_DEF(iemOp_fldln2)
14337{
14338 IEMOP_MNEMONIC(fldln2, "fldln2");
14339 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
14340}
14341
14342
14343/** Opcode 0xd9 0xee. */
14344FNIEMOP_DEF(iemOp_fldz)
14345{
14346 IEMOP_MNEMONIC(fldz, "fldz");
14347 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
14348}
14349
14350
14351/** Opcode 0xd9 0xf0. */
14352FNIEMOP_DEF(iemOp_f2xm1)
14353{
14354 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
14355 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
14356}
14357
14358
14359/**
14360 * Common worker for FPU instructions working on STn and ST0, storing the result
14361 * in STn, and popping the stack unless IE, DE or ZE was raised.
14362 *
14363 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14364 */
14365FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14366{
14367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14368
14369 IEM_MC_BEGIN(3, 1);
14370 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14371 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14372 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14373 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14374
14375 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14376 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14377
14378 IEM_MC_PREPARE_FPU_USAGE();
14379 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14380 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14381 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
14382 IEM_MC_ELSE()
14383 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
14384 IEM_MC_ENDIF();
14385 IEM_MC_ADVANCE_RIP();
14386
14387 IEM_MC_END();
14388 return VINF_SUCCESS;
14389}
14390
14391
14392/** Opcode 0xd9 0xf1. */
14393FNIEMOP_DEF(iemOp_fyl2x)
14394{
14395 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
14396 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
14397}
14398
14399
14400/**
14401 * Common worker for FPU instructions working on ST0 and having two outputs, one
14402 * replacing ST0 and one pushed onto the stack.
14403 *
14404 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14405 */
14406FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
14407{
14408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14409
14410 IEM_MC_BEGIN(2, 1);
14411 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
14412 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
14413 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
14414
14415 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14416 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14417 IEM_MC_PREPARE_FPU_USAGE();
14418 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14419 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
14420 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
14421 IEM_MC_ELSE()
14422 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
14423 IEM_MC_ENDIF();
14424 IEM_MC_ADVANCE_RIP();
14425
14426 IEM_MC_END();
14427 return VINF_SUCCESS;
14428}
14429
14430
14431/** Opcode 0xd9 0xf2. */
14432FNIEMOP_DEF(iemOp_fptan)
14433{
14434 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
14435 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
14436}
14437
14438
14439/** Opcode 0xd9 0xf3. */
14440FNIEMOP_DEF(iemOp_fpatan)
14441{
14442 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
14443 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
14444}
14445
14446
14447/** Opcode 0xd9 0xf4. */
14448FNIEMOP_DEF(iemOp_fxtract)
14449{
14450 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
14451 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
14452}
14453
14454
14455/** Opcode 0xd9 0xf5. */
14456FNIEMOP_DEF(iemOp_fprem1)
14457{
14458 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
14459 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
14460}
14461
14462
14463/** Opcode 0xd9 0xf6. */
14464FNIEMOP_DEF(iemOp_fdecstp)
14465{
14466 IEMOP_MNEMONIC(fdecstp, "fdecstp");
14467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14468 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
14469 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14470 * FINCSTP and FDECSTP. */
14471
14472 IEM_MC_BEGIN(0, 0);
14473
14474 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14475 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14476
14477 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14478 IEM_MC_FPU_STACK_DEC_TOP();
14479 IEM_MC_UPDATE_FSW_CONST(0);
14480
14481 IEM_MC_ADVANCE_RIP();
14482 IEM_MC_END();
14483 return VINF_SUCCESS;
14484}
14485
14486
14487/** Opcode 0xd9 0xf7. */
14488FNIEMOP_DEF(iemOp_fincstp)
14489{
14490 IEMOP_MNEMONIC(fincstp, "fincstp");
14491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14492 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
14493 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
14494 * FINCSTP and FDECSTP. */
14495
14496 IEM_MC_BEGIN(0, 0);
14497
14498 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14499 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14500
14501 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
14502 IEM_MC_FPU_STACK_INC_TOP();
14503 IEM_MC_UPDATE_FSW_CONST(0);
14504
14505 IEM_MC_ADVANCE_RIP();
14506 IEM_MC_END();
14507 return VINF_SUCCESS;
14508}
14509
14510
14511/** Opcode 0xd9 0xf8. */
14512FNIEMOP_DEF(iemOp_fprem)
14513{
14514 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
14515 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
14516}
14517
14518
14519/** Opcode 0xd9 0xf9. */
14520FNIEMOP_DEF(iemOp_fyl2xp1)
14521{
14522 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
14523 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
14524}
14525
14526
14527/** Opcode 0xd9 0xfa. */
14528FNIEMOP_DEF(iemOp_fsqrt)
14529{
14530 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
14531 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
14532}
14533
14534
14535/** Opcode 0xd9 0xfb. */
14536FNIEMOP_DEF(iemOp_fsincos)
14537{
14538 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
14539 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
14540}
14541
14542
14543/** Opcode 0xd9 0xfc. */
14544FNIEMOP_DEF(iemOp_frndint)
14545{
14546 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
14547 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
14548}
14549
14550
14551/** Opcode 0xd9 0xfd. */
14552FNIEMOP_DEF(iemOp_fscale)
14553{
14554 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
14555 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
14556}
14557
14558
14559/** Opcode 0xd9 0xfe. */
14560FNIEMOP_DEF(iemOp_fsin)
14561{
14562 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
14563 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
14564}
14565
14566
14567/** Opcode 0xd9 0xff. */
14568FNIEMOP_DEF(iemOp_fcos)
14569{
14570 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
14571 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
14572}
14573
14574
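/* Note: invalid encodings in the 0xe0..0xff range map to iemOp_Invalid rather than NULL, so the dispatcher below needs no NULL check. */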
14575/** Used by iemOp_EscF1. */
14576IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
14577{
14578 /* 0xe0 */ iemOp_fchs,
14579 /* 0xe1 */ iemOp_fabs,
14580 /* 0xe2 */ iemOp_Invalid,
14581 /* 0xe3 */ iemOp_Invalid,
14582 /* 0xe4 */ iemOp_ftst,
14583 /* 0xe5 */ iemOp_fxam,
14584 /* 0xe6 */ iemOp_Invalid,
14585 /* 0xe7 */ iemOp_Invalid,
14586 /* 0xe8 */ iemOp_fld1,
14587 /* 0xe9 */ iemOp_fldl2t,
14588 /* 0xea */ iemOp_fldl2e,
14589 /* 0xeb */ iemOp_fldpi,
14590 /* 0xec */ iemOp_fldlg2,
14591 /* 0xed */ iemOp_fldln2,
14592 /* 0xee */ iemOp_fldz,
14593 /* 0xef */ iemOp_Invalid,
14594 /* 0xf0 */ iemOp_f2xm1,
14595 /* 0xf1 */ iemOp_fyl2x,
14596 /* 0xf2 */ iemOp_fptan,
14597 /* 0xf3 */ iemOp_fpatan,
14598 /* 0xf4 */ iemOp_fxtract,
14599 /* 0xf5 */ iemOp_fprem1,
14600 /* 0xf6 */ iemOp_fdecstp,
14601 /* 0xf7 */ iemOp_fincstp,
14602 /* 0xf8 */ iemOp_fprem,
14603 /* 0xf9 */ iemOp_fyl2xp1,
14604 /* 0xfa */ iemOp_fsqrt,
14605 /* 0xfb */ iemOp_fsincos,
14606 /* 0xfc */ iemOp_frndint,
14607 /* 0xfd */ iemOp_fscale,
14608 /* 0xfe */ iemOp_fsin,
14609 /* 0xff */ iemOp_fcos
14610};
14611
14612
14613/** Opcode 0xd9. */
14614FNIEMOP_DEF(iemOp_EscF1)
14615{
14616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14617 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
14618
14619 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14620 {
14621 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14622 {
14623 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
14624 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
14625 case 2:
14626 if (bRm == 0xd0)
14627 return FNIEMOP_CALL(iemOp_fnop);
14628 return IEMOP_RAISE_INVALID_OPCODE();
14629 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
14630 case 4:
14631 case 5:
14632 case 6:
14633 case 7:
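    /* mod=3 with reg 4..7 puts bRm in the 0xe0..0xff range covered by the table above. */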
14634 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
14635 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
14636 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14637 }
14638 }
14639 else
14640 {
14641 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14642 {
14643 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
14644 case 1: return IEMOP_RAISE_INVALID_OPCODE();
14645 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
14646 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
14647 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
14648 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
14649 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
14650 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
14651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14652 }
14653 }
14654}
14655
14656
14657/** Opcode 0xda 11/0. */
14658FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
14659{
14660 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
14661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14662
14663 IEM_MC_BEGIN(0, 1);
14664 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14665
14666 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14667 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14668
14669 IEM_MC_PREPARE_FPU_USAGE();
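    /* FCMOVcc only copies ST(i) to ST0 when the condition holds, but FPUIP/FOP are updated either way; an empty ST0 or ST(i) means stack underflow. */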
14670 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14671 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
14672 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14673 IEM_MC_ENDIF();
14674 IEM_MC_UPDATE_FPU_OPCODE_IP();
14675 IEM_MC_ELSE()
14676 IEM_MC_FPU_STACK_UNDERFLOW(0);
14677 IEM_MC_ENDIF();
14678 IEM_MC_ADVANCE_RIP();
14679
14680 IEM_MC_END();
14681 return VINF_SUCCESS;
14682}
14683
14684
14685/** Opcode 0xda 11/1. */
14686FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
14687{
14688 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
14689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14690
14691 IEM_MC_BEGIN(0, 1);
14692 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14693
14694 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14695 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14696
14697 IEM_MC_PREPARE_FPU_USAGE();
14698 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
14700 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14701 IEM_MC_ENDIF();
14702 IEM_MC_UPDATE_FPU_OPCODE_IP();
14703 IEM_MC_ELSE()
14704 IEM_MC_FPU_STACK_UNDERFLOW(0);
14705 IEM_MC_ENDIF();
14706 IEM_MC_ADVANCE_RIP();
14707
14708 IEM_MC_END();
14709 return VINF_SUCCESS;
14710}
14711
14712
14713/** Opcode 0xda 11/2. */
14714FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
14715{
14716 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
14717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14718
14719 IEM_MC_BEGIN(0, 1);
14720 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14721
14722 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14723 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14724
14725 IEM_MC_PREPARE_FPU_USAGE();
14726 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14727 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
14728 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14729 IEM_MC_ENDIF();
14730 IEM_MC_UPDATE_FPU_OPCODE_IP();
14731 IEM_MC_ELSE()
14732 IEM_MC_FPU_STACK_UNDERFLOW(0);
14733 IEM_MC_ENDIF();
14734 IEM_MC_ADVANCE_RIP();
14735
14736 IEM_MC_END();
14737 return VINF_SUCCESS;
14738}
14739
14740
14741/** Opcode 0xda 11/3. */
14742FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
14743{
14744 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
14745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14746
14747 IEM_MC_BEGIN(0, 1);
14748 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14749
14750 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14751 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14752
14753 IEM_MC_PREPARE_FPU_USAGE();
14754 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14755 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
14756 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14757 IEM_MC_ENDIF();
14758 IEM_MC_UPDATE_FPU_OPCODE_IP();
14759 IEM_MC_ELSE()
14760 IEM_MC_FPU_STACK_UNDERFLOW(0);
14761 IEM_MC_ENDIF();
14762 IEM_MC_ADVANCE_RIP();
14763
14764 IEM_MC_END();
14765 return VINF_SUCCESS;
14766}
14767
14768
14769/**
14770 * Common worker for FPU instructions working on ST0 and ST1, only affecting
14771 * flags, and popping twice when done.
14772 *
14773 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14774 */
14775FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
14776{
14777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14778
14779 IEM_MC_BEGIN(3, 1);
14780 IEM_MC_LOCAL(uint16_t, u16Fsw);
14781 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14782 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14783 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14784
14785 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14786 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14787
14788 IEM_MC_PREPARE_FPU_USAGE();
14789 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
14790 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
14791 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
14792 IEM_MC_ELSE()
14793 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
14794 IEM_MC_ENDIF();
14795 IEM_MC_ADVANCE_RIP();
14796
14797 IEM_MC_END();
14798 return VINF_SUCCESS;
14799}
14800
14801
14802/** Opcode 0xda 0xe9. */
14803FNIEMOP_DEF(iemOp_fucompp)
14804{
14805 IEMOP_MNEMONIC(fucompp_st0_st1, "fucompp st0,st1");
14806 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
14807}
14808
14809
14810/**
14811 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14812 * the result in ST0.
14813 *
14814 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14815 */
14816FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14817{
14818 IEM_MC_BEGIN(3, 3);
14819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14820 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14821 IEM_MC_LOCAL(int32_t, i32Val2);
14822 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14823 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14824 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14825
14826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14828
14829 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14830 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14831 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14832
14833 IEM_MC_PREPARE_FPU_USAGE();
14834 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14835 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14836 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14837 IEM_MC_ELSE()
14838 IEM_MC_FPU_STACK_UNDERFLOW(0);
14839 IEM_MC_ENDIF();
14840 IEM_MC_ADVANCE_RIP();
14841
14842 IEM_MC_END();
14843 return VINF_SUCCESS;
14844}
14845
14846
14847/** Opcode 0xda !11/0. */
14848FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
14849{
14850 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
14851 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
14852}
14853
14854
14855/** Opcode 0xda !11/1. */
14856FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
14857{
14858 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
14859 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
14860}
14861
14862
14863/** Opcode 0xda !11/2. */
14864FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
14865{
14866 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
14867
14868 IEM_MC_BEGIN(3, 3);
14869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14870 IEM_MC_LOCAL(uint16_t, u16Fsw);
14871 IEM_MC_LOCAL(int32_t, i32Val2);
14872 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14873 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14874 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14875
14876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14878
14879 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14880 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14881 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14882
14883 IEM_MC_PREPARE_FPU_USAGE();
14884 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14885 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14886 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14887 IEM_MC_ELSE()
14888 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14889 IEM_MC_ENDIF();
14890 IEM_MC_ADVANCE_RIP();
14891
14892 IEM_MC_END();
14893 return VINF_SUCCESS;
14894}
14895
14896
14897/** Opcode 0xda !11/3. */
14898FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
14899{
14900 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
14901
14902 IEM_MC_BEGIN(3, 3);
14903 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14904 IEM_MC_LOCAL(uint16_t, u16Fsw);
14905 IEM_MC_LOCAL(int32_t, i32Val2);
14906 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14907 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14908 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14909
14910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14912
14913 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14914 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14915 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14916
14917 IEM_MC_PREPARE_FPU_USAGE();
14918 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14919 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
14920 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14921 IEM_MC_ELSE()
14922 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14923 IEM_MC_ENDIF();
14924 IEM_MC_ADVANCE_RIP();
14925
14926 IEM_MC_END();
14927 return VINF_SUCCESS;
14928}
14929
14930
14931/** Opcode 0xda !11/4. */
14932FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
14933{
14934 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
14935 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
14936}
14937
14938
14939/** Opcode 0xda !11/5. */
14940FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
14941{
14942 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
14943 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
14944}
14945
14946
14947/** Opcode 0xda !11/6. */
14948FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
14949{
14950 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
14951 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
14952}
14953
14954
14955/** Opcode 0xda !11/7. */
14956FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
14957{
14958 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
14959 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
14960}
14961
14962
14963/** Opcode 0xda. */
14964FNIEMOP_DEF(iemOp_EscF2)
14965{
14966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14967 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
14968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14969 {
14970 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14971 {
14972 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
14973 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
14974 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
14975 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
14976 case 4: return IEMOP_RAISE_INVALID_OPCODE();
14977 case 5:
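    /* Only 0xda 0xe9 (FUCOMPP) is defined in this group. */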
14978 if (bRm == 0xe9)
14979 return FNIEMOP_CALL(iemOp_fucompp);
14980 return IEMOP_RAISE_INVALID_OPCODE();
14981 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14982 case 7: return IEMOP_RAISE_INVALID_OPCODE();
14983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14984 }
14985 }
14986 else
14987 {
14988 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14989 {
14990 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
14991 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
14992 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
14993 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
14994 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
14995 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
14996 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
14997 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
14998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14999 }
15000 }
15001}
15002
15003
15004/** Opcode 0xdb !11/0. */
15005FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
15006{
15007 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
15008
15009 IEM_MC_BEGIN(2, 3);
15010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15011 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15012 IEM_MC_LOCAL(int32_t, i32Val);
15013 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15014 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
15015
15016 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15018
15019 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15020 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15021 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15022
15023 IEM_MC_PREPARE_FPU_USAGE();
15024 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15025 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
15026 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15027 IEM_MC_ELSE()
15028 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15029 IEM_MC_ENDIF();
15030 IEM_MC_ADVANCE_RIP();
15031
15032 IEM_MC_END();
15033 return VINF_SUCCESS;
15034}
15035
15036
15037/** Opcode 0xdb !11/1. */
15038FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
15039{
15040 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
15041 IEM_MC_BEGIN(3, 2);
15042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15043 IEM_MC_LOCAL(uint16_t, u16Fsw);
15044 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15045 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15046 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15047
15048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15050 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15051 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15052
15053 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15054 IEM_MC_PREPARE_FPU_USAGE();
15055 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15056 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15057 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15058 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15059 IEM_MC_ELSE()
15060 IEM_MC_IF_FCW_IM()
15061 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15062 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15063 IEM_MC_ENDIF();
15064 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15065 IEM_MC_ENDIF();
15066 IEM_MC_ADVANCE_RIP();
15067
15068 IEM_MC_END();
15069 return VINF_SUCCESS;
15070}
15071
15072
15073/** Opcode 0xdb !11/2. */
15074FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
15075{
15076 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
15077 IEM_MC_BEGIN(3, 2);
15078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15079 IEM_MC_LOCAL(uint16_t, u16Fsw);
15080 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15081 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15082 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15083
15084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15086 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15087 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15088
15089 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15090 IEM_MC_PREPARE_FPU_USAGE();
15091 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15092 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15093 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15094 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15095 IEM_MC_ELSE()
15096 IEM_MC_IF_FCW_IM()
15097 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15098 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15099 IEM_MC_ENDIF();
15100 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15101 IEM_MC_ENDIF();
15102 IEM_MC_ADVANCE_RIP();
15103
15104 IEM_MC_END();
15105 return VINF_SUCCESS;
15106}
15107
15108
15109/** Opcode 0xdb !11/3. */
15110FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
15111{
15112 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
15113 IEM_MC_BEGIN(3, 2);
15114 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15115 IEM_MC_LOCAL(uint16_t, u16Fsw);
15116 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15117 IEM_MC_ARG(int32_t *, pi32Dst, 1);
15118 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15119
15120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15122 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15123 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15124
15125 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15126 IEM_MC_PREPARE_FPU_USAGE();
15127 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15128 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
15129 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
15130 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15131 IEM_MC_ELSE()
15132 IEM_MC_IF_FCW_IM()
15133 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
15134 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
15135 IEM_MC_ENDIF();
15136 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15137 IEM_MC_ENDIF();
15138 IEM_MC_ADVANCE_RIP();
15139
15140 IEM_MC_END();
15141 return VINF_SUCCESS;
15142}
15143
15144
15145/** Opcode 0xdb !11/5. */
15146FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
15147{
15148 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
15149
15150 IEM_MC_BEGIN(2, 3);
15151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15152 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15153 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
15154 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15155 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
15156
15157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15159
15160 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15161 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15162 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15163
15164 IEM_MC_PREPARE_FPU_USAGE();
15165 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15166 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
15167 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15168 IEM_MC_ELSE()
15169 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15170 IEM_MC_ENDIF();
15171 IEM_MC_ADVANCE_RIP();
15172
15173 IEM_MC_END();
15174 return VINF_SUCCESS;
15175}
15176
15177
15178/** Opcode 0xdb !11/7. */
15179FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
15180{
15181 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
15182 IEM_MC_BEGIN(3, 2);
15183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15184 IEM_MC_LOCAL(uint16_t, u16Fsw);
15185 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15186 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
15187 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15188
15189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15191 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15192 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15193
15194 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15195 IEM_MC_PREPARE_FPU_USAGE();
15196 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15197 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
15198 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
15199 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15200 IEM_MC_ELSE()
15201 IEM_MC_IF_FCW_IM()
15202 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
15203 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
15204 IEM_MC_ENDIF();
15205 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15206 IEM_MC_ENDIF();
15207 IEM_MC_ADVANCE_RIP();
15208
15209 IEM_MC_END();
15210 return VINF_SUCCESS;
15211}
15212
15213
15214/** Opcode 0xdb 11/0. */
15215FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
15216{
15217 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
15218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15219
15220 IEM_MC_BEGIN(0, 1);
15221 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15222
15223 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15224 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15225
15226 IEM_MC_PREPARE_FPU_USAGE();
15227 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15228 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
15229 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15230 IEM_MC_ENDIF();
15231 IEM_MC_UPDATE_FPU_OPCODE_IP();
15232 IEM_MC_ELSE()
15233 IEM_MC_FPU_STACK_UNDERFLOW(0);
15234 IEM_MC_ENDIF();
15235 IEM_MC_ADVANCE_RIP();
15236
15237 IEM_MC_END();
15238 return VINF_SUCCESS;
15239}
15240
15241
15242/** Opcode 0xdb 11/1. */
15243FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
15244{
15245 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
15246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15247
15248 IEM_MC_BEGIN(0, 1);
15249 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15250
15251 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15252 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15253
15254 IEM_MC_PREPARE_FPU_USAGE();
15255 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15256 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
15257 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15258 IEM_MC_ENDIF();
15259 IEM_MC_UPDATE_FPU_OPCODE_IP();
15260 IEM_MC_ELSE()
15261 IEM_MC_FPU_STACK_UNDERFLOW(0);
15262 IEM_MC_ENDIF();
15263 IEM_MC_ADVANCE_RIP();
15264
15265 IEM_MC_END();
15266 return VINF_SUCCESS;
15267}
15268
15269
15270/** Opcode 0xdb 11/2. */
15271FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
15272{
15273 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
15274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15275
15276 IEM_MC_BEGIN(0, 1);
15277 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15278
15279 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15280 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15281
15282 IEM_MC_PREPARE_FPU_USAGE();
15283 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15284 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
15285 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15286 IEM_MC_ENDIF();
15287 IEM_MC_UPDATE_FPU_OPCODE_IP();
15288 IEM_MC_ELSE()
15289 IEM_MC_FPU_STACK_UNDERFLOW(0);
15290 IEM_MC_ENDIF();
15291 IEM_MC_ADVANCE_RIP();
15292
15293 IEM_MC_END();
15294 return VINF_SUCCESS;
15295}
15296
15297
15298/** Opcode 0xdb 11/3. */
15299FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
15300{
15301 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
15302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15303
15304 IEM_MC_BEGIN(0, 1);
15305 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
15306
15307 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15308 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15309
15310 IEM_MC_PREPARE_FPU_USAGE();
15311 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
15312 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
15313 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
15314 IEM_MC_ENDIF();
15315 IEM_MC_UPDATE_FPU_OPCODE_IP();
15316 IEM_MC_ELSE()
15317 IEM_MC_FPU_STACK_UNDERFLOW(0);
15318 IEM_MC_ENDIF();
15319 IEM_MC_ADVANCE_RIP();
15320
15321 IEM_MC_END();
15322 return VINF_SUCCESS;
15323}
15324
15325
15326/** Opcode 0xdb 0xe0. */
15327FNIEMOP_DEF(iemOp_fneni)
15328{
15329 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
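    /* FNENI/FNDISI controlled interrupt masking on the 8087 only; later FPUs ignore them, so just do the #NM check and move on. */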
15330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15331 IEM_MC_BEGIN(0, 0);
15332 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15333 IEM_MC_ADVANCE_RIP();
15334 IEM_MC_END();
15335 return VINF_SUCCESS;
15336}
15337
15338
15339/** Opcode 0xdb 0xe1. */
15340FNIEMOP_DEF(iemOp_fndisi)
15341{
15342 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
15343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15344 IEM_MC_BEGIN(0, 0);
15345 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15346 IEM_MC_ADVANCE_RIP();
15347 IEM_MC_END();
15348 return VINF_SUCCESS;
15349}
15350
15351
15352/** Opcode 0xdb 0xe2. */
15353FNIEMOP_DEF(iemOp_fnclex)
15354{
15355 IEMOP_MNEMONIC(fnclex, "fnclex");
15356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15357
15358 IEM_MC_BEGIN(0, 0);
15359 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15360 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15361 IEM_MC_CLEAR_FSW_EX();
15362 IEM_MC_ADVANCE_RIP();
15363 IEM_MC_END();
15364 return VINF_SUCCESS;
15365}
15366
15367
15368/** Opcode 0xdb 0xe3. */
15369FNIEMOP_DEF(iemOp_fninit)
15370{
15371 IEMOP_MNEMONIC(fninit, "fninit");
15372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
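    /* The full FPU reset is done in C; fCheckXcpts=false selects the no-wait (FNINIT) flavor that skips the pending-exception check. */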
15373 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
15374}
15375
15376
15377/** Opcode 0xdb 0xe4. */
15378FNIEMOP_DEF(iemOp_fnsetpm)
15379{
15380 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
15381 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15382 IEM_MC_BEGIN(0, 0);
15383 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15384 IEM_MC_ADVANCE_RIP();
15385 IEM_MC_END();
15386 return VINF_SUCCESS;
15387}
15388
15389
15390/** Opcode 0xdb 0xe5. */
15391FNIEMOP_DEF(iemOp_frstpm)
15392{
15393 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
15394#if 0 /* #UDs on newer CPUs */
15395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15396 IEM_MC_BEGIN(0, 0);
15397 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15398 IEM_MC_ADVANCE_RIP();
15399 IEM_MC_END();
15400 return VINF_SUCCESS;
15401#else
15402 return IEMOP_RAISE_INVALID_OPCODE();
15403#endif
15404}
15405
15406
15407/** Opcode 0xdb 11/5. */
15408FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
15409{
15410 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
15411 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
15412}
15413
15414
15415/** Opcode 0xdb 11/6. */
15416FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
15417{
15418 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
15419 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
15420}
15421
15422
15423/** Opcode 0xdb. */
15424FNIEMOP_DEF(iemOp_EscF3)
15425{
15426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15427 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
15428 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15429 {
15430 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15431 {
15432 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
15433 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
15434 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
15435 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnu_stN, bRm);
15436 case 4:
15437 switch (bRm)
15438 {
15439 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
15440 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
15441 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
15442 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
15443 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
15444 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
15445 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
15446 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
15447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15448 }
15449 break;
15450 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
15451 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
15452 case 7: return IEMOP_RAISE_INVALID_OPCODE();
15453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15454 }
15455 }
15456 else
15457 {
15458 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15459 {
15460 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
15461 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
15462 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
15463 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
15464 case 4: return IEMOP_RAISE_INVALID_OPCODE();
15465 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
15466 case 6: return IEMOP_RAISE_INVALID_OPCODE();
15467 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
15468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15469 }
15470 }
15471}
15472
15473
15474/**
15475 * Common worker for FPU instructions working on STn and ST0, and storing the
15476 * result in STn unless IE, DE or ZE was raised.
15477 *
15478 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15479 */
15480FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
15481{
15482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15483
15484 IEM_MC_BEGIN(3, 1);
15485 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15486 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15487 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15488 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
15489
15490 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15491 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15492
15493 IEM_MC_PREPARE_FPU_USAGE();
15494 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
15495 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
15496 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15497 IEM_MC_ELSE()
15498 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15499 IEM_MC_ENDIF();
15500 IEM_MC_ADVANCE_RIP();
15501
15502 IEM_MC_END();
15503 return VINF_SUCCESS;
15504}
15505
15506
15507/** Opcode 0xdc 11/0. */
15508FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
15509{
15510 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
15511 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
15512}
15513
15514
15515/** Opcode 0xdc 11/1. */
15516FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
15517{
15518 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
15519 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
15520}
15521
15522
15523/** Opcode 0xdc 11/4. */
15524FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
15525{
15526 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
15527 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
15528}
15529
15530
15531/** Opcode 0xdc 11/5. */
15532FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
15533{
15534 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
15535 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
15536}
15537
15538
15539/** Opcode 0xdc 11/6. */
15540FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
15541{
15542 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
15543 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
15544}
15545
15546
15547/** Opcode 0xdc 11/7. */
15548FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
15549{
15550 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
15551 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
15552}
15553
15554
15555/**
15556 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
15557 * memory operand, and storing the result in ST0.
15558 *
 * @param bRm      The ModR/M byte (memory form).
15559 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15560 */
15561FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
15562{
15563 IEM_MC_BEGIN(3, 3);
15564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15565 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15566 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
15567 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15568 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
15569 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
15570
15571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15573 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15574 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15575
15576 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15577 IEM_MC_PREPARE_FPU_USAGE();
15578 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
15579 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
15580 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15581 IEM_MC_ELSE()
15582 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15583 IEM_MC_ENDIF();
15584 IEM_MC_ADVANCE_RIP();
15585
15586 IEM_MC_END();
15587 return VINF_SUCCESS;
15588}
15589
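/*
 * Illustrative example for the worker above: in 32-bit code, DC 45 F8
 * decodes as mod = 1, reg = 0, rm = 5 with disp8 = -8, i.e.
 * "fadd qword [ebp-8]"; the 64-bit real is fetched, widened to R80 by the
 * assembly worker, and added to ST(0).
 */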
15590
15591/** Opcode 0xdc !11/0. */
15592FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
15593{
15594 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
15595 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
15596}
15597
15598
15599/** Opcode 0xdc !11/1. */
15600FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
15601{
15602 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
15603 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
15604}
15605
15606
15607/** Opcode 0xdc !11/2. */
15608FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
15609{
15610 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
15611
15612 IEM_MC_BEGIN(3, 3);
15613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15614 IEM_MC_LOCAL(uint16_t, u16Fsw);
15615 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15616 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15617 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15618 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15619
15620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15622
15623 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15624 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15625 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15626
15627 IEM_MC_PREPARE_FPU_USAGE();
15628 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15629 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15630 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15631 IEM_MC_ELSE()
15632 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15633 IEM_MC_ENDIF();
15634 IEM_MC_ADVANCE_RIP();
15635
15636 IEM_MC_END();
15637 return VINF_SUCCESS;
15638}
15639
15640
15641/** Opcode 0xdc !11/3. */
15642FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
15643{
15644 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
15645
15646 IEM_MC_BEGIN(3, 3);
15647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15648 IEM_MC_LOCAL(uint16_t, u16Fsw);
15649 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
15650 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15651 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15652 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
15653
15654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15656
15657 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15658 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15659 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15660
15661 IEM_MC_PREPARE_FPU_USAGE();
15662 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15663 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
15664 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15665 IEM_MC_ELSE()
15666 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15667 IEM_MC_ENDIF();
15668 IEM_MC_ADVANCE_RIP();
15669
15670 IEM_MC_END();
15671 return VINF_SUCCESS;
15672}
15673
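/*
 * Note that FCOMP only differs from the FCOM variant above in using the
 * ..._THEN_POP FSW/underflow updaters, which pop ST(0) once the compare
 * result has been folded into FSW.
 */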
15674
15675/** Opcode 0xdc !11/4. */
15676FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
15677{
15678 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
15679 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
15680}
15681
15682
15683/** Opcode 0xdc !11/5. */
15684FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
15685{
15686 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
15687 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
15688}
15689
15690
15691/** Opcode 0xdc !11/6. */
15692FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
15693{
15694 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
15695 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
15696}
15697
15698
15699/** Opcode 0xdc !11/7. */
15700FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
15701{
15702 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
15703 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
15704}
15705
15706
15707/** Opcode 0xdc. */
15708FNIEMOP_DEF(iemOp_EscF4)
15709{
15710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15711 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
15712 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15713 {
15714 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15715 {
15716 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
15717 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
15718 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, Intel behavior is that of FCOM ST(i). */
15719 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, Intel behavior is that of FCOMP ST(i). */
15720 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
15721 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
15722 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
15723 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
15724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15725 }
15726 }
15727 else
15728 {
15729 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15730 {
15731 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
15732 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
15733 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
15734 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
15735 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
15736 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
15737 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
15738 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
15739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15740 }
15741 }
15742}
15743
15744
15745/** Opcode 0xdd !11/0.
15746 * @sa iemOp_fld_m32r */
15747FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
15748{
15749 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
15750
15751 IEM_MC_BEGIN(2, 3);
15752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15753 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15754 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
15755 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15756 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
15757
15758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15760 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15761 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15762
15763 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15764 IEM_MC_PREPARE_FPU_USAGE();
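 /* The load pushes the stack, i.e. decrements TOP, so the register that
    will become the new ST(0) is the current ST(7); it must be empty or
    we have a stack overflow. */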
15765 IEM_MC_IF_FPUREG_IS_EMPTY(7)
15766 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
15767 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15768 IEM_MC_ELSE()
15769 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
15770 IEM_MC_ENDIF();
15771 IEM_MC_ADVANCE_RIP();
15772
15773 IEM_MC_END();
15774 return VINF_SUCCESS;
15775}
15776
15777
15778/** Opcode 0xdd !11/1. */
15779FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
15780{
15781 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
15782 IEM_MC_BEGIN(3, 2);
15783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15784 IEM_MC_LOCAL(uint16_t, u16Fsw);
15785 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15786 IEM_MC_ARG(int64_t *, pi64Dst, 1);
15787 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15788
15789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15791 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15792 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15793
15794 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15795 IEM_MC_PREPARE_FPU_USAGE();
15796 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15797 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
15798 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15799 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15800 IEM_MC_ELSE()
15801 IEM_MC_IF_FCW_IM()
15802 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
15803 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
15804 IEM_MC_ENDIF();
15805 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15806 IEM_MC_ENDIF();
15807 IEM_MC_ADVANCE_RIP();
15808
15809 IEM_MC_END();
15810 return VINF_SUCCESS;
15811}
15812
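/*
 * Note: FISTTP (introduced with SSE3) always converts with truncation,
 * i.e. it rounds towards zero regardless of the FCW rounding control,
 * which is why it uses the dedicated iemAImpl_fistt_* workers rather than
 * the FIST(P) ones.
 */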
15813
15814/** Opcode 0xdd !11/2. */
15815FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
15816{
15817 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
15818 IEM_MC_BEGIN(3, 2);
15819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15820 IEM_MC_LOCAL(uint16_t, u16Fsw);
15821 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15822 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15823 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15824
15825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15827 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15828 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15829
15830 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15831 IEM_MC_PREPARE_FPU_USAGE();
15832 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15833 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15834 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15835 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15836 IEM_MC_ELSE()
15837 IEM_MC_IF_FCW_IM()
15838 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15839 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15840 IEM_MC_ENDIF();
15841 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15842 IEM_MC_ENDIF();
15843 IEM_MC_ADVANCE_RIP();
15844
15845 IEM_MC_END();
15846 return VINF_SUCCESS;
15847}
15848
15849
15852/** Opcode 0xdd !11/3. */
15853FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
15854{
15855 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
15856 IEM_MC_BEGIN(3, 2);
15857 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15858 IEM_MC_LOCAL(uint16_t, u16Fsw);
15859 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15860 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
15861 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15862
15863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15865 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15866 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15867
15868 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
15869 IEM_MC_PREPARE_FPU_USAGE();
15870 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15871 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
15872 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
15873 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15874 IEM_MC_ELSE()
15875 IEM_MC_IF_FCW_IM()
15876 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
15877 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
15878 IEM_MC_ENDIF();
15879 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
15880 IEM_MC_ENDIF();
15881 IEM_MC_ADVANCE_RIP();
15882
15883 IEM_MC_END();
15884 return VINF_SUCCESS;
15885}
15886
15887
15888/** Opcode 0xdd !11/4. */
15889FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
15890{
15891 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
15892 IEM_MC_BEGIN(3, 0);
15893 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15894 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15895 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
15896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15898 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15899 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15900 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15901 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
15902 IEM_MC_END();
15903 return VINF_SUCCESS;
15904}
15905
15906
15907/** Opcode 0xdd !11/6. */
15908FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
15909{
15910 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
15911 IEM_MC_BEGIN(3, 0);
15912 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
15913 IEM_MC_ARG(uint8_t, iEffSeg, 1);
15914 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
15915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15917 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15918 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15919 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
15920 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
15921 IEM_MC_END();
15922 return VINF_SUCCESS;
15924}
15925

15926/** Opcode 0xdd !11/7. */
15927FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
15928{
15929 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
15930
15931 IEM_MC_BEGIN(0, 2);
15932 IEM_MC_LOCAL(uint16_t, u16Tmp);
15933 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15934
15935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15937 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15938
15939 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
15940 IEM_MC_FETCH_FSW(u16Tmp);
15941 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
15942 IEM_MC_ADVANCE_RIP();
15943
15944/** @todo Debug / drop a hint to the verifier that things may differ
15945 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
15946 * NT4SP1. (X86_FSW_PE) */
15947 IEM_MC_END();
15948 return VINF_SUCCESS;
15949}
15950
15951
15952/** Opcode 0xdd 11/0. */
15953FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
15954{
15955 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
15956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15957 /* Note! C0, C1, C2 and C3 are documented as undefined; we leave them
15958 unmodified. */
15959
15960 IEM_MC_BEGIN(0, 0);
15961
15962 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15963 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15964
15965 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
15966 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
15967 IEM_MC_UPDATE_FPU_OPCODE_IP();
15968
15969 IEM_MC_ADVANCE_RIP();
15970 IEM_MC_END();
15971 return VINF_SUCCESS;
15972}
15973
15974
15975/** Opcode 0xdd 11/2. */
15976FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
15977{
15978 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
15979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15980
15981 IEM_MC_BEGIN(0, 2);
15982 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
15983 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15984 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15985 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15986
15987 IEM_MC_PREPARE_FPU_USAGE();
15988 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15989 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
15990 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
15991 IEM_MC_ELSE()
15992 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
15993 IEM_MC_ENDIF();
15994
15995 IEM_MC_ADVANCE_RIP();
15996 IEM_MC_END();
15997 return VINF_SUCCESS;
15998}
15999
16000
16001/** Opcode 0xdd 11/4. */
16002FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
16003{
16004 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
16005 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
16006}
16007
16008
16009/** Opcode 0xdd 11/5. */
16010FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
16011{
16012 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
16013 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
16014}
16015
16016
16017/** Opcode 0xdd. */
16018FNIEMOP_DEF(iemOp_EscF5)
16019{
16020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16021 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
16022 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16023 {
16024 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16025 {
16026 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
16027 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, Intel behavior is that of FXCH ST(i). */
16028 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
16029 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
16030 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
16031 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
16032 case 6: return IEMOP_RAISE_INVALID_OPCODE();
16033 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16035 }
16036 }
16037 else
16038 {
16039 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16040 {
16041 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
16042 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
16043 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
16044 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
16045 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
16046 case 5: return IEMOP_RAISE_INVALID_OPCODE();
16047 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
16048 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
16049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16050 }
16051 }
16052}
16053
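/*
 * Illustrative example for the dispatcher above: DD D9 has mod = 3,
 * reg = 3 and rm = 1, so it lands in iemOp_fstp_stN, i.e. "fstp st1".
 */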
16054
16055/** Opcode 0xde 11/0. */
16056FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
16057{
16058 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
16059 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
16060}
16061
16062
16063/** Opcode 0xde 11/1. */
16064FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
16065{
16066 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
16067 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
16068}
16069
16070
16071/** Opcode 0xde 0xd9. */
16072FNIEMOP_DEF(iemOp_fcompp)
16073{
16074 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
16075 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
16076}
16077
16078
16079/** Opcode 0xde 11/4. */
16080FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
16081{
16082 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
16083 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
16084}
16085
16086
16087/** Opcode 0xde 11/5. */
16088FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
16089{
16090 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
16091 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
16092}
16093
16094
16095/** Opcode 0xde 11/6. */
16096FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
16097{
16098 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
16099 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
16100}
16101
16102
16103/** Opcode 0xde 11/7. */
16104FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
16105{
16106 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
16107 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
16108}
16109
16110
16111/**
16112 * Common worker for FPU instructions working on ST0 and an m16i, and storing
16113 * the result in ST0.
16114 *
 * @param bRm      The ModR/M byte (memory form).
16115 * @param pfnAImpl Pointer to the instruction implementation (assembly).
16116 */
16117FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
16118{
16119 IEM_MC_BEGIN(3, 3);
16120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16121 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16122 IEM_MC_LOCAL(int16_t, i16Val2);
16123 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16124 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16125 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16126
16127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16129
16130 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16131 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16132 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16133
16134 IEM_MC_PREPARE_FPU_USAGE();
16135 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16136 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
16137 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
16138 IEM_MC_ELSE()
16139 IEM_MC_FPU_STACK_UNDERFLOW(0);
16140 IEM_MC_ENDIF();
16141 IEM_MC_ADVANCE_RIP();
16142
16143 IEM_MC_END();
16144 return VINF_SUCCESS;
16145}
16146
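/*
 * Illustrative example for the worker above: in 16-bit code, DE 07
 * decodes as mod = 0, reg = 0, rm = 7, i.e. "fiadd word [bx]"; the 16-bit
 * integer is loaded, converted to R80 and added to ST(0).
 */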
16147
16148/** Opcode 0xde !11/0. */
16149FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
16150{
16151 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
16152 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
16153}
16154
16155
16156/** Opcode 0xde !11/1. */
16157FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
16158{
16159 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
16160 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
16161}
16162
16163
16164/** Opcode 0xde !11/2. */
16165FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
16166{
16167 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
16168
16169 IEM_MC_BEGIN(3, 3);
16170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16171 IEM_MC_LOCAL(uint16_t, u16Fsw);
16172 IEM_MC_LOCAL(int16_t, i16Val2);
16173 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16174 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16175 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16176
16177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16179
16180 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16181 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16182 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16183
16184 IEM_MC_PREPARE_FPU_USAGE();
16185 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16186 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16187 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16188 IEM_MC_ELSE()
16189 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16190 IEM_MC_ENDIF();
16191 IEM_MC_ADVANCE_RIP();
16192
16193 IEM_MC_END();
16194 return VINF_SUCCESS;
16195}
16196
16197
16198/** Opcode 0xde !11/3. */
16199FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
16200{
16201 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
16202
16203 IEM_MC_BEGIN(3, 3);
16204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16205 IEM_MC_LOCAL(uint16_t, u16Fsw);
16206 IEM_MC_LOCAL(int16_t, i16Val2);
16207 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16208 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
16209 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
16210
16211 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16213
16214 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16215 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16216 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16217
16218 IEM_MC_PREPARE_FPU_USAGE();
16219 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
16220 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
16221 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16222 IEM_MC_ELSE()
16223 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16224 IEM_MC_ENDIF();
16225 IEM_MC_ADVANCE_RIP();
16226
16227 IEM_MC_END();
16228 return VINF_SUCCESS;
16229}
16230
16231
16232/** Opcode 0xde !11/4. */
16233FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
16234{
16235 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
16236 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
16237}
16238
16239
16240/** Opcode 0xde !11/5. */
16241FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
16242{
16243 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
16244 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
16245}
16246
16247
16248/** Opcode 0xde !11/6. */
16249FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
16250{
16251 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
16252 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
16253}
16254
16255
16256/** Opcode 0xde !11/7. */
16257FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
16258{
16259 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
16260 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
16261}
16262
16263
16264/** Opcode 0xde. */
16265FNIEMOP_DEF(iemOp_EscF6)
16266{
16267 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16268 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
16269 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16270 {
16271 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16272 {
16273 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
16274 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
16275 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
16276 case 3: if (bRm == 0xd9)
16277 return FNIEMOP_CALL(iemOp_fcompp);
16278 return IEMOP_RAISE_INVALID_OPCODE();
16279 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
16280 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
16281 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
16282 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
16283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16284 }
16285 }
16286 else
16287 {
16288 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16289 {
16290 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
16291 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
16292 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
16293 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
16294 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
16295 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
16296 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
16297 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
16298 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16299 }
16300 }
16301}
16302
16303
16304/** Opcode 0xdf 11/0.
16305 * Undocumented instruction, assumed to work like ffree + fincstp. */
16306FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
16307{
16308 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
16309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16310
16311 IEM_MC_BEGIN(0, 0);
16312
16313 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16314 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16315
16316 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
16317 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
16318 IEM_MC_FPU_STACK_INC_TOP();
16319 IEM_MC_UPDATE_FPU_OPCODE_IP();
16320
16321 IEM_MC_ADVANCE_RIP();
16322 IEM_MC_END();
16323 return VINF_SUCCESS;
16324}
16325
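/*
 * Note: FFREEP is documented by AMD but not by Intel; it is the only
 * FFREE variant that also pops the register stack (the
 * IEM_MC_FPU_STACK_INC_TOP above).
 */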
16326
16327/** Opcode 0xdf 0xe0. */
16328FNIEMOP_DEF(iemOp_fnstsw_ax)
16329{
16330 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
16331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16332
16333 IEM_MC_BEGIN(0, 1);
16334 IEM_MC_LOCAL(uint16_t, u16Tmp);
16335 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16336 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
16337 IEM_MC_FETCH_FSW(u16Tmp);
16338 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
16339 IEM_MC_ADVANCE_RIP();
16340 IEM_MC_END();
16341 return VINF_SUCCESS;
16342}
16343
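/*
 * Note: this is the no-wait form.  The FSTSW AX encoding emitted by
 * assemblers is just an FWAIT (9B) followed by the same DF E0 bytes, so
 * it is covered by the regular WAIT decoding plus this function.
 */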
16344
16345/** Opcode 0xdf 11/5. */
16346FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
16347{
16348 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
16349 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16350}
16351
16352
16353/** Opcode 0xdf 11/6. */
16354FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
16355{
16356 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
16357 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
16358}
16359
16360
16361/** Opcode 0xdf !11/0. */
16362FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
16363{
16364 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
16365
16366 IEM_MC_BEGIN(2, 3);
16367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16368 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16369 IEM_MC_LOCAL(int16_t, i16Val);
16370 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16371 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
16372
16373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16375
16376 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16377 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16378 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16379
16380 IEM_MC_PREPARE_FPU_USAGE();
16381 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16382 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
16383 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16384 IEM_MC_ELSE()
16385 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16386 IEM_MC_ENDIF();
16387 IEM_MC_ADVANCE_RIP();
16388
16389 IEM_MC_END();
16390 return VINF_SUCCESS;
16391}
16392
16393
16394/** Opcode 0xdf !11/1. */
16395FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
16396{
16397 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
16398 IEM_MC_BEGIN(3, 2);
16399 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16400 IEM_MC_LOCAL(uint16_t, u16Fsw);
16401 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16402 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16403 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16404
16405 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16407 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16408 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16409
16410 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16411 IEM_MC_PREPARE_FPU_USAGE();
16412 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16413 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16414 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16415 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16416 IEM_MC_ELSE()
16417 IEM_MC_IF_FCW_IM()
16418 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16419 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16420 IEM_MC_ENDIF();
16421 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16422 IEM_MC_ENDIF();
16423 IEM_MC_ADVANCE_RIP();
16424
16425 IEM_MC_END();
16426 return VINF_SUCCESS;
16427}
16428
16429
16430/** Opcode 0xdf !11/2. */
16431FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
16432{
16433 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
16434 IEM_MC_BEGIN(3, 2);
16435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16436 IEM_MC_LOCAL(uint16_t, u16Fsw);
16437 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16438 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16439 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16440
16441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16443 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16444 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16445
16446 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16447 IEM_MC_PREPARE_FPU_USAGE();
16448 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16449 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16450 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16451 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16452 IEM_MC_ELSE()
16453 IEM_MC_IF_FCW_IM()
16454 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16455 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16456 IEM_MC_ENDIF();
16457 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16458 IEM_MC_ENDIF();
16459 IEM_MC_ADVANCE_RIP();
16460
16461 IEM_MC_END();
16462 return VINF_SUCCESS;
16463}
16464
16465
16466/** Opcode 0xdf !11/3. */
16467FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
16468{
16469 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
16470 IEM_MC_BEGIN(3, 2);
16471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16472 IEM_MC_LOCAL(uint16_t, u16Fsw);
16473 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16474 IEM_MC_ARG(int16_t *, pi16Dst, 1);
16475 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16476
16477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16479 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16480 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16481
16482 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16483 IEM_MC_PREPARE_FPU_USAGE();
16484 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16485 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
16486 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
16487 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16488 IEM_MC_ELSE()
16489 IEM_MC_IF_FCW_IM()
16490 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
16491 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
16492 IEM_MC_ENDIF();
16493 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16494 IEM_MC_ENDIF();
16495 IEM_MC_ADVANCE_RIP();
16496
16497 IEM_MC_END();
16498 return VINF_SUCCESS;
16499}
16500
16501
16502/** Opcode 0xdf !11/4. */
16503FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
16504
16505
16506/** Opcode 0xdf !11/5. */
16507FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
16508{
16509 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
16510
16511 IEM_MC_BEGIN(2, 3);
16512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16513 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
16514 IEM_MC_LOCAL(int64_t, i64Val);
16515 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
16516 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
16517
16518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16520
16521 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16522 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16523 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16524
16525 IEM_MC_PREPARE_FPU_USAGE();
16526 IEM_MC_IF_FPUREG_IS_EMPTY(7)
16527 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
16528 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16529 IEM_MC_ELSE()
16530 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
16531 IEM_MC_ENDIF();
16532 IEM_MC_ADVANCE_RIP();
16533
16534 IEM_MC_END();
16535 return VINF_SUCCESS;
16536}
16537
16538
16539/** Opcode 0xdf !11/6. */
16540FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
16541
16542
16543/** Opcode 0xdf !11/7. */
16544FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
16545{
16546 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
16547 IEM_MC_BEGIN(3, 2);
16548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16549 IEM_MC_LOCAL(uint16_t, u16Fsw);
16550 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
16551 IEM_MC_ARG(int64_t *, pi64Dst, 1);
16552 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
16553
16554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16556 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
16557 IEM_MC_MAYBE_RAISE_FPU_XCPT();
16558
16559 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
16560 IEM_MC_PREPARE_FPU_USAGE();
16561 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
16562 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
16563 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
16564 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16565 IEM_MC_ELSE()
16566 IEM_MC_IF_FCW_IM()
16567 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
16568 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
16569 IEM_MC_ENDIF();
16570 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
16571 IEM_MC_ENDIF();
16572 IEM_MC_ADVANCE_RIP();
16573
16574 IEM_MC_END();
16575 return VINF_SUCCESS;
16576}
16577
16578
16579/** Opcode 0xdf. */
16580FNIEMOP_DEF(iemOp_EscF7)
16581{
16582 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7); /* Record the FPU opcode like the other escape byte decoders do. */
16583 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16584 {
16585 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16586 {
16587 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
16588 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on Intel. */
16589 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on Intel. */
16590 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on Intel. */
16591 case 4: if (bRm == 0xe0)
16592 return FNIEMOP_CALL(iemOp_fnstsw_ax);
16593 return IEMOP_RAISE_INVALID_OPCODE();
16594 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
16595 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
16596 case 7: return IEMOP_RAISE_INVALID_OPCODE();
16597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16598 }
16599 }
16600 else
16601 {
16602 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16603 {
16604 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
16605 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
16606 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
16607 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
16608 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
16609 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
16610 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
16611 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
16612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16613 }
16614 }
16615}
16616
16617
16618/** Opcode 0xe0. */
16619FNIEMOP_DEF(iemOp_loopne_Jb)
16620{
16621 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
16622 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16624 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16625
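 /* LOOPNE: decrement the count register selected by the address size
    (CX/ECX/RCX) and take the branch if it is still non-zero and ZF is
    clear; the decrement does not modify EFLAGS. */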
16626 switch (pVCpu->iem.s.enmEffAddrMode)
16627 {
16628 case IEMMODE_16BIT:
16629 IEM_MC_BEGIN(0,0);
16630 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16631 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16632 IEM_MC_REL_JMP_S8(i8Imm);
16633 } IEM_MC_ELSE() {
16634 IEM_MC_ADVANCE_RIP();
16635 } IEM_MC_ENDIF();
16636 IEM_MC_END();
16637 return VINF_SUCCESS;
16638
16639 case IEMMODE_32BIT:
16640 IEM_MC_BEGIN(0,0);
16641 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16642 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16643 IEM_MC_REL_JMP_S8(i8Imm);
16644 } IEM_MC_ELSE() {
16645 IEM_MC_ADVANCE_RIP();
16646 } IEM_MC_ENDIF();
16647 IEM_MC_END();
16648 return VINF_SUCCESS;
16649
16650 case IEMMODE_64BIT:
16651 IEM_MC_BEGIN(0,0);
16652 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16653 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
16654 IEM_MC_REL_JMP_S8(i8Imm);
16655 } IEM_MC_ELSE() {
16656 IEM_MC_ADVANCE_RIP();
16657 } IEM_MC_ENDIF();
16658 IEM_MC_END();
16659 return VINF_SUCCESS;
16660
16661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16662 }
16663}
16664
16665
16666/** Opcode 0xe1. */
16667FNIEMOP_DEF(iemOp_loope_Jb)
16668{
16669 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
16670 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16672 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16673
16674 switch (pVCpu->iem.s.enmEffAddrMode)
16675 {
16676 case IEMMODE_16BIT:
16677 IEM_MC_BEGIN(0,0);
16678 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16679 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16680 IEM_MC_REL_JMP_S8(i8Imm);
16681 } IEM_MC_ELSE() {
16682 IEM_MC_ADVANCE_RIP();
16683 } IEM_MC_ENDIF();
16684 IEM_MC_END();
16685 return VINF_SUCCESS;
16686
16687 case IEMMODE_32BIT:
16688 IEM_MC_BEGIN(0,0);
16689 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16690 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16691 IEM_MC_REL_JMP_S8(i8Imm);
16692 } IEM_MC_ELSE() {
16693 IEM_MC_ADVANCE_RIP();
16694 } IEM_MC_ENDIF();
16695 IEM_MC_END();
16696 return VINF_SUCCESS;
16697
16698 case IEMMODE_64BIT:
16699 IEM_MC_BEGIN(0,0);
16700 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16701 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
16702 IEM_MC_REL_JMP_S8(i8Imm);
16703 } IEM_MC_ELSE() {
16704 IEM_MC_ADVANCE_RIP();
16705 } IEM_MC_ENDIF();
16706 IEM_MC_END();
16707 return VINF_SUCCESS;
16708
16709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16710 }
16711}
16712
16713
16714/** Opcode 0xe2. */
16715FNIEMOP_DEF(iemOp_loop_Jb)
16716{
16717 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
16718 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16720 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16721
16722 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
16723 * using the 32-bit operand size override. How can that be restarted? See
16724 * the weird pseudo code in the Intel manual. */
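 /* If the branch targets the LOOP instruction itself (i8Imm equal to
    minus the instruction length, i.e. "loop $"), all it can do is spin
    rCX down to zero, so each mode below simply clears the count register
    and moves on instead of replaying the loop one count at a time. */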
16725 switch (pVCpu->iem.s.enmEffAddrMode)
16726 {
16727 case IEMMODE_16BIT:
16728 IEM_MC_BEGIN(0,0);
16729 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16730 {
16731 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
16732 IEM_MC_IF_CX_IS_NZ() {
16733 IEM_MC_REL_JMP_S8(i8Imm);
16734 } IEM_MC_ELSE() {
16735 IEM_MC_ADVANCE_RIP();
16736 } IEM_MC_ENDIF();
16737 }
16738 else
16739 {
16740 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
16741 IEM_MC_ADVANCE_RIP();
16742 }
16743 IEM_MC_END();
16744 return VINF_SUCCESS;
16745
16746 case IEMMODE_32BIT:
16747 IEM_MC_BEGIN(0,0);
16748 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16749 {
16750 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
16751 IEM_MC_IF_ECX_IS_NZ() {
16752 IEM_MC_REL_JMP_S8(i8Imm);
16753 } IEM_MC_ELSE() {
16754 IEM_MC_ADVANCE_RIP();
16755 } IEM_MC_ENDIF();
16756 }
16757 else
16758 {
16759 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
16760 IEM_MC_ADVANCE_RIP();
16761 }
16762 IEM_MC_END();
16763 return VINF_SUCCESS;
16764
16765 case IEMMODE_64BIT:
16766 IEM_MC_BEGIN(0,0);
16767 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
16768 {
16769 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
16770 IEM_MC_IF_RCX_IS_NZ() {
16771 IEM_MC_REL_JMP_S8(i8Imm);
16772 } IEM_MC_ELSE() {
16773 IEM_MC_ADVANCE_RIP();
16774 } IEM_MC_ENDIF();
16775 }
16776 else
16777 {
16778 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
16779 IEM_MC_ADVANCE_RIP();
16780 }
16781 IEM_MC_END();
16782 return VINF_SUCCESS;
16783
16784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16785 }
16786}
16787
16788
16789/** Opcode 0xe3. */
16790FNIEMOP_DEF(iemOp_jecxz_Jb)
16791{
16792 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
16793 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16795 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16796
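 /* JCXZ/JECXZ/JRCXZ (selected by the address size) only tests the count
    register for zero; unlike LOOP it modifies neither the register nor
    any flags. */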
16797 switch (pVCpu->iem.s.enmEffAddrMode)
16798 {
16799 case IEMMODE_16BIT:
16800 IEM_MC_BEGIN(0,0);
16801 IEM_MC_IF_CX_IS_NZ() {
16802 IEM_MC_ADVANCE_RIP();
16803 } IEM_MC_ELSE() {
16804 IEM_MC_REL_JMP_S8(i8Imm);
16805 } IEM_MC_ENDIF();
16806 IEM_MC_END();
16807 return VINF_SUCCESS;
16808
16809 case IEMMODE_32BIT:
16810 IEM_MC_BEGIN(0,0);
16811 IEM_MC_IF_ECX_IS_NZ() {
16812 IEM_MC_ADVANCE_RIP();
16813 } IEM_MC_ELSE() {
16814 IEM_MC_REL_JMP_S8(i8Imm);
16815 } IEM_MC_ENDIF();
16816 IEM_MC_END();
16817 return VINF_SUCCESS;
16818
16819 case IEMMODE_64BIT:
16820 IEM_MC_BEGIN(0,0);
16821 IEM_MC_IF_RCX_IS_NZ() {
16822 IEM_MC_ADVANCE_RIP();
16823 } IEM_MC_ELSE() {
16824 IEM_MC_REL_JMP_S8(i8Imm);
16825 } IEM_MC_ENDIF();
16826 IEM_MC_END();
16827 return VINF_SUCCESS;
16828
16829 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16830 }
16831}
16832
16833
16834/** Opcode 0xe4. */
16835FNIEMOP_DEF(iemOp_in_AL_Ib)
16836{
16837 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
16838 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16840 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16841}
16842
16843
16844/** Opcode 0xe5. */
16845FNIEMOP_DEF(iemOp_in_eAX_Ib)
16846{
16847 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
16848 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16850 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16851}
16852
16853
16854/** Opcode 0xe6. */
16855FNIEMOP_DEF(iemOp_out_Ib_AL)
16856{
16857 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
16858 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16860 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
16861}
16862
16863
16864/** Opcode 0xe7. */
16865FNIEMOP_DEF(iemOp_out_Ib_eAX)
16866{
16867 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
16868 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16870 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16871}
16872
16873
16874/** Opcode 0xe8. */
16875FNIEMOP_DEF(iemOp_call_Jv)
16876{
16877 IEMOP_MNEMONIC(call_Jv, "call Jv");
16878 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16879 switch (pVCpu->iem.s.enmEffOpSize)
16880 {
16881 case IEMMODE_16BIT:
16882 {
16883 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
16884 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
16885 }
16886
16887 case IEMMODE_32BIT:
16888 {
16889 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
16890 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
16891 }
16892
16893 case IEMMODE_64BIT:
16894 {
16895 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
16896 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
16897 }
16898
16899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16900 }
16901}
16902
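/*
 * Note: in 64-bit mode the CALL rel displacement stays only 32 bits wide
 * (Jz); the S32_SX_U64 fetch above sign-extends it before the C
 * implementation adds it to RIP.
 */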
16903
16904/** Opcode 0xe9. */
16905FNIEMOP_DEF(iemOp_jmp_Jv)
16906{
16907 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
16908 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16909 switch (pVCpu->iem.s.enmEffOpSize)
16910 {
16911 case IEMMODE_16BIT:
16912 {
16913 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
16914 IEM_MC_BEGIN(0, 0);
16915 IEM_MC_REL_JMP_S16(i16Imm);
16916 IEM_MC_END();
16917 return VINF_SUCCESS;
16918 }
16919
16920 case IEMMODE_64BIT:
16921 case IEMMODE_32BIT:
16922 {
16923 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
16924 IEM_MC_BEGIN(0, 0);
16925 IEM_MC_REL_JMP_S32(i32Imm);
16926 IEM_MC_END();
16927 return VINF_SUCCESS;
16928 }
16929
16930 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16931 }
16932}
16933
16934
16935/** Opcode 0xea. */
16936FNIEMOP_DEF(iemOp_jmp_Ap)
16937{
16938 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
16939 IEMOP_HLP_NO_64BIT();
16940
16941 /* Decode the far pointer address and pass it on to the far call C implementation. */
16942 uint32_t offSeg;
16943 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
16944 IEM_OPCODE_GET_NEXT_U32(&offSeg);
16945 else
16946 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
16947 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
16948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16949 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
16950}
16951
16952
16953/** Opcode 0xeb. */
16954FNIEMOP_DEF(iemOp_jmp_Jb)
16955{
16956 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
16957 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
16958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16959 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
16960
16961 IEM_MC_BEGIN(0, 0);
16962 IEM_MC_REL_JMP_S8(i8Imm);
16963 IEM_MC_END();
16964 return VINF_SUCCESS;
16965}
16966
16967
16968/** Opcode 0xec. */
16969FNIEMOP_DEF(iemOp_in_AL_DX)
16970{
16971 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
16972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16973 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
16974}
16975
16976
16977/** Opcode 0xed. */
16978FNIEMOP_DEF(iemOp_eAX_DX)
16979{
16980 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
16981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16982 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
16983}
16984
16985
16986/** Opcode 0xee. */
16987FNIEMOP_DEF(iemOp_out_DX_AL)
16988{
16989 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
16990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16991 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
16992}
16993
16994
16995/** Opcode 0xef. */
16996FNIEMOP_DEF(iemOp_out_DX_eAX)
16997{
16998 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
16999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17000 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
17001}
17002
17003
17004/** Opcode 0xf0. */
17005FNIEMOP_DEF(iemOp_lock)
17006{
17007 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
17008 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
17009
17010 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17011 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17012}
17013
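/*
 * Prefix bytes like LOCK simply accumulate in fPrefixes and then restart
 * decoding with the next opcode byte through the one byte map; the
 * instruction body later selects the locked or normal worker by testing
 * IEM_OP_PRF_LOCK.
 */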
17014
17015/** Opcode 0xf1. */
17016FNIEMOP_DEF(iemOp_int_1)
17017{
17018 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
17019 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
17020 /** @todo testcase! */
17021 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
17022}
17023
17024
17025/** Opcode 0xf2. */
17026FNIEMOP_DEF(iemOp_repne)
17027{
17028 /* This overrides any previous REPE prefix. */
17029 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
17030 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
17031 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
17032
17033 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17034 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17035}
17036
17037
17038/** Opcode 0xf3. */
17039FNIEMOP_DEF(iemOp_repe)
17040{
17041 /* This overrides any previous REPNE prefix. */
17042 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
17043 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
17044 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
17045
17046 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
17047 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
17048}
17049
17050
17051/** Opcode 0xf4. */
17052FNIEMOP_DEF(iemOp_hlt)
17053{
17054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17055 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
17056}
17057
17058
17059/** Opcode 0xf5. */
17060FNIEMOP_DEF(iemOp_cmc)
17061{
17062 IEMOP_MNEMONIC(cmc, "cmc");
17063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17064 IEM_MC_BEGIN(0, 0);
17065 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
17066 IEM_MC_ADVANCE_RIP();
17067 IEM_MC_END();
17068 return VINF_SUCCESS;
17069}
17070
17071
17072/**
17073 * Common implementation of 'inc/dec/not/neg Eb'.
17074 *
17075 * @param bRm The RM byte.
17076 * @param pImpl The instruction implementation.
17077 */
17078FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17079{
17080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17081 {
17082 /* register access */
17083 IEM_MC_BEGIN(2, 0);
17084 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17085 IEM_MC_ARG(uint32_t *, pEFlags, 1);
17086 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17087 IEM_MC_REF_EFLAGS(pEFlags);
17088 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17089 IEM_MC_ADVANCE_RIP();
17090 IEM_MC_END();
17091 }
17092 else
17093 {
17094 /* memory access. */
17095 IEM_MC_BEGIN(2, 2);
17096 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17097 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17099
17100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17101 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17102 IEM_MC_FETCH_EFLAGS(EFlags);
17103 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17104 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
17105 else
17106 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
17107
17108 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
17109 IEM_MC_COMMIT_EFLAGS(EFlags);
17110 IEM_MC_ADVANCE_RIP();
17111 IEM_MC_END();
17112 }
17113 return VINF_SUCCESS;
17114}
17115
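/*
 * Illustrative example for the worker above: F6 D3 (group 3 /2, assuming
 * no REX prefix) decodes as mod = 3, reg = 2, rm = 3, i.e. "not bl", so
 * the register path is taken with pu8Dst referencing BL.
 */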
17116
17117/**
17118 * Common implementation of 'inc/dec/not/neg Ev'.
17119 *
17120 * @param bRm The RM byte.
17121 * @param pImpl The instruction implementation.
17122 */
17123FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
17124{
17125 /* Registers are handled by a common worker. */
17126 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17127 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17128
17129 /* Memory we do here. */
17130 switch (pVCpu->iem.s.enmEffOpSize)
17131 {
17132 case IEMMODE_16BIT:
17133 IEM_MC_BEGIN(2, 2);
17134 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17135 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17137
17138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17139 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17140 IEM_MC_FETCH_EFLAGS(EFlags);
17141 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17142 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
17143 else
17144 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
17145
17146 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
17147 IEM_MC_COMMIT_EFLAGS(EFlags);
17148 IEM_MC_ADVANCE_RIP();
17149 IEM_MC_END();
17150 return VINF_SUCCESS;
17151
17152 case IEMMODE_32BIT:
17153 IEM_MC_BEGIN(2, 2);
17154 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17155 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17157
17158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17159 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17160 IEM_MC_FETCH_EFLAGS(EFlags);
17161 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17162 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
17163 else
17164 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
17165
17166 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
17167 IEM_MC_COMMIT_EFLAGS(EFlags);
17168 IEM_MC_ADVANCE_RIP();
17169 IEM_MC_END();
17170 return VINF_SUCCESS;
17171
17172 case IEMMODE_64BIT:
17173 IEM_MC_BEGIN(2, 2);
17174 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17175 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
17176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17177
17178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17179 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17180 IEM_MC_FETCH_EFLAGS(EFlags);
17181 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
17182 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
17183 else
17184 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
17185
17186 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
17187 IEM_MC_COMMIT_EFLAGS(EFlags);
17188 IEM_MC_ADVANCE_RIP();
17189 IEM_MC_END();
17190 return VINF_SUCCESS;
17191
17192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17193 }
17194}
17195
17196
17197/** Opcode 0xf6 /0. */
17198FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
17199{
17200 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
17201 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17202
17203 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17204 {
17205 /* register access */
17206 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17208
17209 IEM_MC_BEGIN(3, 0);
17210 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17211 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
17212 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17213 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17214 IEM_MC_REF_EFLAGS(pEFlags);
17215 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17216 IEM_MC_ADVANCE_RIP();
17217 IEM_MC_END();
17218 }
17219 else
17220 {
17221 /* memory access. */
17222 IEM_MC_BEGIN(3, 2);
17223 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
17224 IEM_MC_ARG(uint8_t, u8Src, 1);
17225 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17227
17228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
17229 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
17230 IEM_MC_ASSIGN(u8Src, u8Imm);
17231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17232 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17233 IEM_MC_FETCH_EFLAGS(EFlags);
17234 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
17235
17236 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
17237 IEM_MC_COMMIT_EFLAGS(EFlags);
17238 IEM_MC_ADVANCE_RIP();
17239 IEM_MC_END();
17240 }
17241 return VINF_SUCCESS;
17242}
17243
17244
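/*
 * TEST only reads its operands: it ANDs destination and source, updates the
 * arithmetic flags and throws the result away, which is why the memory path
 * above maps the operand with IEM_ACCESS_DATA_R and rejects the lock prefix.
 * A rough standalone model of the flag calculation (Calc8BitTestFlags is a
 * hypothetical name; the real iemAImpl_test_u8 is an assembly helper):
 *
 *      #include <stdint.h>
 *
 *      static uint32_t Calc8BitTestFlags(uint8_t u8Dst, uint8_t u8Src)
 *      {
 *          uint8_t const u8Res = u8Dst & u8Src;
 *          uint32_t      fEfl  = 0;                    // CF and OF are cleared
 *          if (!u8Res)
 *              fEfl |= UINT32_C(1) << 6;               // ZF
 *          if (u8Res & 0x80)
 *              fEfl |= UINT32_C(1) << 7;               // SF
 *          unsigned cSetBits = 0;
 *          for (unsigned i = 0; i < 8; i++)
 *              cSetBits += (u8Res >> i) & 1;
 *          if (!(cSetBits & 1))
 *              fEfl |= UINT32_C(1) << 2;               // PF (even parity)
 *          return fEfl;                                // AF is left undefined
 *      }
 */

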
17245/** Opcode 0xf7 /0. */
17246FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
17247{
17248 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
17249 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
17250
17251 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17252 {
17253 /* register access */
17254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17255 switch (pVCpu->iem.s.enmEffOpSize)
17256 {
17257 case IEMMODE_16BIT:
17258 {
17259 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17260 IEM_MC_BEGIN(3, 0);
17261 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17262 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
17263 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17264 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17265 IEM_MC_REF_EFLAGS(pEFlags);
17266 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17267 IEM_MC_ADVANCE_RIP();
17268 IEM_MC_END();
17269 return VINF_SUCCESS;
17270 }
17271
17272 case IEMMODE_32BIT:
17273 {
17274 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17275 IEM_MC_BEGIN(3, 0);
17276 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17277 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
17278 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17279 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17280 IEM_MC_REF_EFLAGS(pEFlags);
17281 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17282                /* No need to clear the high dword here; test doesn't write the result back. */
17283 IEM_MC_ADVANCE_RIP();
17284 IEM_MC_END();
17285 return VINF_SUCCESS;
17286 }
17287
17288 case IEMMODE_64BIT:
17289 {
17290 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17291 IEM_MC_BEGIN(3, 0);
17292 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17293 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
17294 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17295 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17296 IEM_MC_REF_EFLAGS(pEFlags);
17297 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17298 IEM_MC_ADVANCE_RIP();
17299 IEM_MC_END();
17300 return VINF_SUCCESS;
17301 }
17302
17303 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17304 }
17305 }
17306 else
17307 {
17308 /* memory access. */
17309 switch (pVCpu->iem.s.enmEffOpSize)
17310 {
17311 case IEMMODE_16BIT:
17312 {
17313 IEM_MC_BEGIN(3, 2);
17314 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
17315 IEM_MC_ARG(uint16_t, u16Src, 1);
17316 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17317 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17318
17319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
17320 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
17321 IEM_MC_ASSIGN(u16Src, u16Imm);
17322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17323 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17324 IEM_MC_FETCH_EFLAGS(EFlags);
17325 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
17326
17327 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
17328 IEM_MC_COMMIT_EFLAGS(EFlags);
17329 IEM_MC_ADVANCE_RIP();
17330 IEM_MC_END();
17331 return VINF_SUCCESS;
17332 }
17333
17334 case IEMMODE_32BIT:
17335 {
17336 IEM_MC_BEGIN(3, 2);
17337 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
17338 IEM_MC_ARG(uint32_t, u32Src, 1);
17339 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17341
17342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17343 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
17344 IEM_MC_ASSIGN(u32Src, u32Imm);
17345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17346 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17347 IEM_MC_FETCH_EFLAGS(EFlags);
17348 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
17349
17350 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
17351 IEM_MC_COMMIT_EFLAGS(EFlags);
17352 IEM_MC_ADVANCE_RIP();
17353 IEM_MC_END();
17354 return VINF_SUCCESS;
17355 }
17356
17357 case IEMMODE_64BIT:
17358 {
17359 IEM_MC_BEGIN(3, 2);
17360 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
17361 IEM_MC_ARG(uint64_t, u64Src, 1);
17362 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
17363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17364
17365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
17366 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
17367 IEM_MC_ASSIGN(u64Src, u64Imm);
17368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17369 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
17370 IEM_MC_FETCH_EFLAGS(EFlags);
17371 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
17372
17373 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
17374 IEM_MC_COMMIT_EFLAGS(EFlags);
17375 IEM_MC_ADVANCE_RIP();
17376 IEM_MC_END();
17377 return VINF_SUCCESS;
17378 }
17379
17380 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17381 }
17382 }
17383}
17384
17385
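/*
 * The third argument to IEM_MC_CALC_RM_EFF_ADDR in the memory paths above is
 * the number of immediate bytes still to be fetched: 1 for Ib, 2 for the
 * 16-bit Iv, and 4 for both the 32-bit and 64-bit Iv forms (the 64-bit form
 * takes a sign-extended imm32, hence IEM_OPCODE_GET_NEXT_S32_SX_U64).  The
 * count matters because RIP-relative addressing is relative to the end of
 * the instruction, so, roughly:
 *
 *      // GCPtrEff = RIP of the next instruction + disp32
 *      //          = (current decode position + remaining immediate bytes) + disp32
 */

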
17386/** Opcode 0xf6 /4, /5, /6 and /7. */
17387FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
17388{
17389 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17390 {
17391 /* register access */
17392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17393 IEM_MC_BEGIN(3, 1);
17394 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17395 IEM_MC_ARG(uint8_t, u8Value, 1);
17396 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17397 IEM_MC_LOCAL(int32_t, rc);
17398
17399 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17400 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17401 IEM_MC_REF_EFLAGS(pEFlags);
17402 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
17403 IEM_MC_IF_LOCAL_IS_Z(rc) {
17404 IEM_MC_ADVANCE_RIP();
17405 } IEM_MC_ELSE() {
17406 IEM_MC_RAISE_DIVIDE_ERROR();
17407 } IEM_MC_ENDIF();
17408
17409 IEM_MC_END();
17410 }
17411 else
17412 {
17413 /* memory access. */
17414 IEM_MC_BEGIN(3, 2);
17415 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17416 IEM_MC_ARG(uint8_t, u8Value, 1);
17417 IEM_MC_ARG(uint32_t *, pEFlags, 2);
17418 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17419 IEM_MC_LOCAL(int32_t, rc);
17420
17421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17423 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17424 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17425 IEM_MC_REF_EFLAGS(pEFlags);
17426 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
17427 IEM_MC_IF_LOCAL_IS_Z(rc) {
17428 IEM_MC_ADVANCE_RIP();
17429 } IEM_MC_ELSE() {
17430 IEM_MC_RAISE_DIVIDE_ERROR();
17431 } IEM_MC_ENDIF();
17432
17433 IEM_MC_END();
17434 }
17435 return VINF_SUCCESS;
17436}
17437
17438
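/*
 * For the byte-sized group the implicit operand is AX: MUL/IMUL deposit the
 * 16-bit product in AX, while DIV/IDIV divide AX by the r/m8 operand and
 * return the quotient in AL and the remainder in AH, raising #DE on divide
 * by zero or quotient overflow.  That is exactly the rc and
 * IEM_MC_RAISE_DIVIDE_ERROR pattern above.  A minimal unsigned model
 * (MyDivU8 is a hypothetical stand-in for the assembly helper
 * iemAImpl_div_u8):
 *
 *      #include <stdint.h>
 *
 *      static int MyDivU8(uint16_t *pu16AX, uint8_t u8Divisor)
 *      {
 *          if (!u8Divisor)
 *              return -1;                              // #DE: divide by zero
 *          uint16_t const uQuot = *pu16AX / u8Divisor;
 *          uint16_t const uRem  = *pu16AX % u8Divisor;
 *          if (uQuot > 0xff)
 *              return -1;                              // #DE: quotient doesn't fit in AL
 *          *pu16AX = (uint16_t)((uRem << 8) | uQuot);  // AH:AL
 *          return 0;
 *      }
 */

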
17439/** Opcode 0xf7 /4, /5, /6 and /7. */
17440FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
17441{
17442 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17443
17444 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17445 {
17446 /* register access */
17447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17448 switch (pVCpu->iem.s.enmEffOpSize)
17449 {
17450 case IEMMODE_16BIT:
17451 {
17453 IEM_MC_BEGIN(4, 1);
17454 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17455 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17456 IEM_MC_ARG(uint16_t, u16Value, 2);
17457 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17458 IEM_MC_LOCAL(int32_t, rc);
17459
17460 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17461 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17462 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17463 IEM_MC_REF_EFLAGS(pEFlags);
17464 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17465 IEM_MC_IF_LOCAL_IS_Z(rc) {
17466 IEM_MC_ADVANCE_RIP();
17467 } IEM_MC_ELSE() {
17468 IEM_MC_RAISE_DIVIDE_ERROR();
17469 } IEM_MC_ENDIF();
17470
17471 IEM_MC_END();
17472 return VINF_SUCCESS;
17473 }
17474
17475 case IEMMODE_32BIT:
17476 {
17478 IEM_MC_BEGIN(4, 1);
17479 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17480 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17481 IEM_MC_ARG(uint32_t, u32Value, 2);
17482 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17483 IEM_MC_LOCAL(int32_t, rc);
17484
17485 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17486 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17487 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17488 IEM_MC_REF_EFLAGS(pEFlags);
17489 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17490 IEM_MC_IF_LOCAL_IS_Z(rc) {
17491 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17492 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17493 IEM_MC_ADVANCE_RIP();
17494 } IEM_MC_ELSE() {
17495 IEM_MC_RAISE_DIVIDE_ERROR();
17496 } IEM_MC_ENDIF();
17497
17498 IEM_MC_END();
17499 return VINF_SUCCESS;
17500 }
17501
17502 case IEMMODE_64BIT:
17503 {
17505 IEM_MC_BEGIN(4, 1);
17506 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17507 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17508 IEM_MC_ARG(uint64_t, u64Value, 2);
17509 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17510 IEM_MC_LOCAL(int32_t, rc);
17511
17512 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17513 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17514 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17515 IEM_MC_REF_EFLAGS(pEFlags);
17516 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17517 IEM_MC_IF_LOCAL_IS_Z(rc) {
17518 IEM_MC_ADVANCE_RIP();
17519 } IEM_MC_ELSE() {
17520 IEM_MC_RAISE_DIVIDE_ERROR();
17521 } IEM_MC_ENDIF();
17522
17523 IEM_MC_END();
17524 return VINF_SUCCESS;
17525 }
17526
17527 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17528 }
17529 }
17530 else
17531 {
17532 /* memory access. */
17533 switch (pVCpu->iem.s.enmEffOpSize)
17534 {
17535 case IEMMODE_16BIT:
17536 {
17537 IEM_MC_BEGIN(4, 2);
17538 IEM_MC_ARG(uint16_t *, pu16AX, 0);
17539 IEM_MC_ARG(uint16_t *, pu16DX, 1);
17540 IEM_MC_ARG(uint16_t, u16Value, 2);
17541 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17543 IEM_MC_LOCAL(int32_t, rc);
17544
17545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17547 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17548 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
17549 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
17550 IEM_MC_REF_EFLAGS(pEFlags);
17551 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
17552 IEM_MC_IF_LOCAL_IS_Z(rc) {
17553 IEM_MC_ADVANCE_RIP();
17554 } IEM_MC_ELSE() {
17555 IEM_MC_RAISE_DIVIDE_ERROR();
17556 } IEM_MC_ENDIF();
17557
17558 IEM_MC_END();
17559 return VINF_SUCCESS;
17560 }
17561
17562 case IEMMODE_32BIT:
17563 {
17564 IEM_MC_BEGIN(4, 2);
17565 IEM_MC_ARG(uint32_t *, pu32AX, 0);
17566 IEM_MC_ARG(uint32_t *, pu32DX, 1);
17567 IEM_MC_ARG(uint32_t, u32Value, 2);
17568 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17569 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17570 IEM_MC_LOCAL(int32_t, rc);
17571
17572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17574 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17575 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
17576 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
17577 IEM_MC_REF_EFLAGS(pEFlags);
17578 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
17579 IEM_MC_IF_LOCAL_IS_Z(rc) {
17580 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
17581 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
17582 IEM_MC_ADVANCE_RIP();
17583 } IEM_MC_ELSE() {
17584 IEM_MC_RAISE_DIVIDE_ERROR();
17585 } IEM_MC_ENDIF();
17586
17587 IEM_MC_END();
17588 return VINF_SUCCESS;
17589 }
17590
17591 case IEMMODE_64BIT:
17592 {
17593 IEM_MC_BEGIN(4, 2);
17594 IEM_MC_ARG(uint64_t *, pu64AX, 0);
17595 IEM_MC_ARG(uint64_t *, pu64DX, 1);
17596 IEM_MC_ARG(uint64_t, u64Value, 2);
17597 IEM_MC_ARG(uint32_t *, pEFlags, 3);
17598 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
17599 IEM_MC_LOCAL(int32_t, rc);
17600
17601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
17602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17603 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
17604 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
17605 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
17606 IEM_MC_REF_EFLAGS(pEFlags);
17607 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
17608 IEM_MC_IF_LOCAL_IS_Z(rc) {
17609 IEM_MC_ADVANCE_RIP();
17610 } IEM_MC_ELSE() {
17611 IEM_MC_RAISE_DIVIDE_ERROR();
17612 } IEM_MC_ENDIF();
17613
17614 IEM_MC_END();
17615 return VINF_SUCCESS;
17616 }
17617
17618 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17619 }
17620 }
17621}
17622
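/*
 * The Ev-sized variants operate on the DX:AX, EDX:EAX or RDX:RAX pair, which
 * is why the worker references both xAX and xDX.  For instance the 32-bit
 * unsigned divide behaves like this sketch (MyDivU32 is a hypothetical
 * stand-in for the assembly helper):
 *
 *      #include <stdint.h>
 *
 *      static int MyDivU32(uint32_t *pu32AX, uint32_t *pu32DX, uint32_t u32Divisor)
 *      {
 *          uint64_t const uDividend = ((uint64_t)*pu32DX << 32) | *pu32AX;
 *          if (!u32Divisor || uDividend / u32Divisor > UINT32_MAX)
 *              return -1;                                      // #DE
 *          *pu32AX = (uint32_t)(uDividend / u32Divisor);       // quotient  -> EAX
 *          *pu32DX = (uint32_t)(uDividend % u32Divisor);       // remainder -> EDX
 *          return 0;
 *      }
 *
 * In 64-bit mode the 32-bit results must also zero the high halves of RAX
 * and RDX, hence the IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF calls above.
 */
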
17623/** Opcode 0xf6. */
17624FNIEMOP_DEF(iemOp_Grp3_Eb)
17625{
17626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17627 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17628 {
17629 case 0:
17630 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
17631 case 1:
17632/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17633 return IEMOP_RAISE_INVALID_OPCODE();
17634 case 2:
17635 IEMOP_MNEMONIC(not_Eb, "not Eb");
17636 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
17637 case 3:
17638 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
17639 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
17640 case 4:
17641 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
17642 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17643 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
17644 case 5:
17645 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
17646 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17647 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
17648 case 6:
17649 IEMOP_MNEMONIC(div_Eb, "div Eb");
17650 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17651 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
17652 case 7:
17653 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
17654 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17655 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
17656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17657 }
17658}
17659
17660
17661/** Opcode 0xf7. */
17662FNIEMOP_DEF(iemOp_Grp3_Ev)
17663{
17664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17665 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17666 {
17667 case 0:
17668 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
17669 case 1:
17670/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
17671 return IEMOP_RAISE_INVALID_OPCODE();
17672 case 2:
17673 IEMOP_MNEMONIC(not_Ev, "not Ev");
17674 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
17675 case 3:
17676 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
17677 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
17678 case 4:
17679 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
17680 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17681 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
17682 case 5:
17683 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
17684 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
17685 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
17686 case 6:
17687 IEMOP_MNEMONIC(div_Ev, "div Ev");
17688 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17689 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
17690 case 7:
17691 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
17692 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
17693 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
17694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17695 }
17696}
17697
17698
17699/** Opcode 0xf8. */
17700FNIEMOP_DEF(iemOp_clc)
17701{
17702 IEMOP_MNEMONIC(clc, "clc");
17703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17704 IEM_MC_BEGIN(0, 0);
17705 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
17706 IEM_MC_ADVANCE_RIP();
17707 IEM_MC_END();
17708 return VINF_SUCCESS;
17709}
17710
17711
17712/** Opcode 0xf9. */
17713FNIEMOP_DEF(iemOp_stc)
17714{
17715 IEMOP_MNEMONIC(stc, "stc");
17716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17717 IEM_MC_BEGIN(0, 0);
17718 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
17719 IEM_MC_ADVANCE_RIP();
17720 IEM_MC_END();
17721 return VINF_SUCCESS;
17722}
17723
17724
17725/** Opcode 0xfa. */
17726FNIEMOP_DEF(iemOp_cli)
17727{
17728 IEMOP_MNEMONIC(cli, "cli");
17729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17730 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
17731}
17732
17733
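/** Opcode 0xfb. */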
17734FNIEMOP_DEF(iemOp_sti)
17735{
17736 IEMOP_MNEMONIC(sti, "sti");
17737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17738 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
17739}
17740
17741
17742/** Opcode 0xfc. */
17743FNIEMOP_DEF(iemOp_cld)
17744{
17745 IEMOP_MNEMONIC(cld, "cld");
17746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17747 IEM_MC_BEGIN(0, 0);
17748 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
17749 IEM_MC_ADVANCE_RIP();
17750 IEM_MC_END();
17751 return VINF_SUCCESS;
17752}
17753
17754
17755/** Opcode 0xfd. */
17756FNIEMOP_DEF(iemOp_std)
17757{
17758 IEMOP_MNEMONIC(std, "std");
17759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17760 IEM_MC_BEGIN(0, 0);
17761 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
17762 IEM_MC_ADVANCE_RIP();
17763 IEM_MC_END();
17764 return VINF_SUCCESS;
17765}
17766
17767
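/*
 * CLD and STD only matter to the string instructions: after each element the
 * index registers are stepped up (DF=0) or down (DF=1) by the operand size.
 * Roughly:
 *
 *      // delta applied to rSI/rDI per string iteration
 *      int cbDelta = (fEFlags & X86_EFL_DF) ? -(int)cbOperand : (int)cbOperand;
 */

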
17768/** Opcode 0xfe. */
17769FNIEMOP_DEF(iemOp_Grp4)
17770{
17771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
17772 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
17773 {
17774 case 0:
17775 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
17776 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
17777 case 1:
17778 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
17779 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
17780 default:
17781 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
17782 return IEMOP_RAISE_INVALID_OPCODE();
17783 }
17784}
17785
17786
17787/**
17788 * Opcode 0xff /2.
17789 * @param bRm The RM byte.
17790 */
17791FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
17792{
17793 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
17794 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17795
17796 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17797 {
17798 /* The new RIP is taken from a register. */
17799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17800 switch (pVCpu->iem.s.enmEffOpSize)
17801 {
17802 case IEMMODE_16BIT:
17803 IEM_MC_BEGIN(1, 0);
17804 IEM_MC_ARG(uint16_t, u16Target, 0);
17805 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17806 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17807                IEM_MC_END();
17808 return VINF_SUCCESS;
17809
17810 case IEMMODE_32BIT:
17811 IEM_MC_BEGIN(1, 0);
17812 IEM_MC_ARG(uint32_t, u32Target, 0);
17813 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17814 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17815                IEM_MC_END();
17816 return VINF_SUCCESS;
17817
17818 case IEMMODE_64BIT:
17819 IEM_MC_BEGIN(1, 0);
17820 IEM_MC_ARG(uint64_t, u64Target, 0);
17821 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17822 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17823                IEM_MC_END();
17824 return VINF_SUCCESS;
17825
17826 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17827 }
17828 }
17829 else
17830 {
17831        /* The new RIP is taken from a memory location. */
17832 switch (pVCpu->iem.s.enmEffOpSize)
17833 {
17834 case IEMMODE_16BIT:
17835 IEM_MC_BEGIN(1, 1);
17836 IEM_MC_ARG(uint16_t, u16Target, 0);
17837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17838 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17840 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17841 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
17842                IEM_MC_END();
17843 return VINF_SUCCESS;
17844
17845 case IEMMODE_32BIT:
17846 IEM_MC_BEGIN(1, 1);
17847 IEM_MC_ARG(uint32_t, u32Target, 0);
17848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17851 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17852 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
17853                IEM_MC_END();
17854 return VINF_SUCCESS;
17855
17856 case IEMMODE_64BIT:
17857 IEM_MC_BEGIN(1, 1);
17858 IEM_MC_ARG(uint64_t, u64Target, 0);
17859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17862 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17863 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
17864                IEM_MC_END();
17865 return VINF_SUCCESS;
17866
17867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17868 }
17869 }
17870}
17871
17872typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17873
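/** Common worker for grp5 callf (0xff /3) and jmpf (0xff /5): loads a far
 *  pointer from memory and branches via the given C implementation. */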
17874FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17875{
17876    /* A far pointer cannot come from a register; mod=3 is invalid here. */
17877 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
17878 { /* likely */ }
17879 else
17880 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17881
17882 /* Far pointer loaded from memory. */
17883 switch (pVCpu->iem.s.enmEffOpSize)
17884 {
17885 case IEMMODE_16BIT:
17886 IEM_MC_BEGIN(3, 1);
17887 IEM_MC_ARG(uint16_t, u16Sel, 0);
17888 IEM_MC_ARG(uint16_t, offSeg, 1);
17889 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17890 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17893 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17894 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
17895 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17896 IEM_MC_END();
17897 return VINF_SUCCESS;
17898
17899 case IEMMODE_64BIT:
17900 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17901 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17902 * and call far qword [rsp] encodings. */
17903 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
17904 {
17905 IEM_MC_BEGIN(3, 1);
17906 IEM_MC_ARG(uint16_t, u16Sel, 0);
17907 IEM_MC_ARG(uint64_t, offSeg, 1);
17908                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
17909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17910 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17912 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17913 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
17914 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17915 IEM_MC_END();
17916 return VINF_SUCCESS;
17917 }
17918 /* AMD falls thru. */
17919
17920 case IEMMODE_32BIT:
17921 IEM_MC_BEGIN(3, 1);
17922 IEM_MC_ARG(uint16_t, u16Sel, 0);
17923 IEM_MC_ARG(uint32_t, offSeg, 1);
17924 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17928 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
17929 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
17930 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17931 IEM_MC_END();
17932 return VINF_SUCCESS;
17933
17934 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17935 }
17936}
17937
17938
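/*
 * The Ep operand is a far pointer stored with the offset first and the
 * 16-bit selector right after it, which is what the IEM_MC_FETCH_MEM_U16_DISP
 * calls with displacements 2, 4 and 8 above read.  As a sketch of the three
 * layouts (hypothetical struct names, shown for illustration only):
 *
 *      struct FarPtr1616 { uint16_t off; uint16_t sel; };  // m16:16
 *      struct FarPtr1632 { uint32_t off; uint16_t sel; };  // m16:32
 *      struct FarPtr1664 { uint64_t off; uint16_t sel; };  // m16:64 (REX.W; AMD apparently ignores it, see the @todo above)
 */

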
17939/**
17940 * Opcode 0xff /3.
17941 * @param bRm The RM byte.
17942 */
17943FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
17944{
17945 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
17946 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
17947}
17948
17949
17950/**
17951 * Opcode 0xff /4.
17952 * @param bRm The RM byte.
17953 */
17954FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
17955{
17956 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
17957 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
17958
17959 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17960 {
17961 /* The new RIP is taken from a register. */
17962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17963 switch (pVCpu->iem.s.enmEffOpSize)
17964 {
17965 case IEMMODE_16BIT:
17966 IEM_MC_BEGIN(0, 1);
17967 IEM_MC_LOCAL(uint16_t, u16Target);
17968 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17969 IEM_MC_SET_RIP_U16(u16Target);
17970                IEM_MC_END();
17971 return VINF_SUCCESS;
17972
17973 case IEMMODE_32BIT:
17974 IEM_MC_BEGIN(0, 1);
17975 IEM_MC_LOCAL(uint32_t, u32Target);
17976 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17977 IEM_MC_SET_RIP_U32(u32Target);
17978                IEM_MC_END();
17979 return VINF_SUCCESS;
17980
17981 case IEMMODE_64BIT:
17982 IEM_MC_BEGIN(0, 1);
17983 IEM_MC_LOCAL(uint64_t, u64Target);
17984 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
17985 IEM_MC_SET_RIP_U64(u64Target);
17986                IEM_MC_END();
17987 return VINF_SUCCESS;
17988
17989 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17990 }
17991 }
17992 else
17993 {
17994 /* The new RIP is taken from a memory location. */
17995 switch (pVCpu->iem.s.enmEffOpSize)
17996 {
17997 case IEMMODE_16BIT:
17998 IEM_MC_BEGIN(0, 2);
17999 IEM_MC_LOCAL(uint16_t, u16Target);
18000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18003 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18004 IEM_MC_SET_RIP_U16(u16Target);
18005                IEM_MC_END();
18006 return VINF_SUCCESS;
18007
18008 case IEMMODE_32BIT:
18009 IEM_MC_BEGIN(0, 2);
18010 IEM_MC_LOCAL(uint32_t, u32Target);
18011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18014 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18015 IEM_MC_SET_RIP_U32(u32Target);
18016                IEM_MC_END();
18017 return VINF_SUCCESS;
18018
18019 case IEMMODE_64BIT:
18020 IEM_MC_BEGIN(0, 2);
18021 IEM_MC_LOCAL(uint64_t, u64Target);
18022 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18025 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18026 IEM_MC_SET_RIP_U64(u64Target);
18027                IEM_MC_END();
18028 return VINF_SUCCESS;
18029
18030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18031 }
18032 }
18033}
18034
18035
18036/**
18037 * Opcode 0xff /5.
18038 * @param bRm The RM byte.
18039 */
18040FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
18041{
18042 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
18043 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
18044}
18045
18046
18047/**
18048 * Opcode 0xff /6.
18049 * @param bRm The RM byte.
18050 */
18051FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
18052{
18053 IEMOP_MNEMONIC(push_Ev, "push Ev");
18054
18055 /* Registers are handled by a common worker. */
18056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
18057 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
18058
18059 /* Memory we do here. */
18060 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
18061 switch (pVCpu->iem.s.enmEffOpSize)
18062 {
18063 case IEMMODE_16BIT:
18064 IEM_MC_BEGIN(0, 2);
18065 IEM_MC_LOCAL(uint16_t, u16Src);
18066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18069 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18070 IEM_MC_PUSH_U16(u16Src);
18071 IEM_MC_ADVANCE_RIP();
18072 IEM_MC_END();
18073 return VINF_SUCCESS;
18074
18075 case IEMMODE_32BIT:
18076 IEM_MC_BEGIN(0, 2);
18077 IEM_MC_LOCAL(uint32_t, u32Src);
18078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18081 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18082 IEM_MC_PUSH_U32(u32Src);
18083 IEM_MC_ADVANCE_RIP();
18084 IEM_MC_END();
18085 return VINF_SUCCESS;
18086
18087 case IEMMODE_64BIT:
18088 IEM_MC_BEGIN(0, 2);
18089 IEM_MC_LOCAL(uint64_t, u64Src);
18090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
18091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
18092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
18093 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
18094 IEM_MC_PUSH_U64(u64Src);
18095 IEM_MC_ADVANCE_RIP();
18096 IEM_MC_END();
18097 return VINF_SUCCESS;
18098
18099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
18100 }
18101}
18102
18103
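/*
 * Note that in 64-bit mode PUSH defaults to a 64-bit operand, see
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE above: a 0x66 prefix selects the 16-bit
 * form and there is no 32-bit push.  The memory form behaves roughly like
 * this sketch (ReadU64 and WriteU64 are hypothetical helpers):
 *
 *      // push qword [rbx]
 *      uint64_t const u64Src = ReadU64(iEffSeg, GCPtrEffSrc);  // read the source first...
 *      uRSP -= 8;
 *      WriteU64(X86_SREG_SS, uRSP, u64Src);                    // ...then store at the new RSP
 */

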
18104/** Opcode 0xff. */
18105FNIEMOP_DEF(iemOp_Grp5)
18106{
18107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
18108 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
18109 {
18110 case 0:
18111 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
18112 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
18113 case 1:
18114 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
18115 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
18116 case 2:
18117 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
18118 case 3:
18119 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
18120 case 4:
18121 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
18122 case 5:
18123 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
18124 case 6:
18125 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
18126 case 7:
18127 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
18128 return IEMOP_RAISE_INVALID_OPCODE();
18129 }
18130 AssertFailedReturn(VERR_IEM_IPE_3);
18131}
18132
18133
18134
18135const PFNIEMOP g_apfnOneByteMap[256] =
18136{
18137 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
18138 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
18139 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
18140 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
18141 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
18142 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
18143 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
18144 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
18145 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
18146 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
18147 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
18148 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
18149 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
18150 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
18151 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
18152 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
18153 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
18154 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
18155 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
18156 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
18157 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
18158 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
18159 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
18160 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
18161 /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
18162 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
18163 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
18164 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
18165 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
18166 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
18167 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
18168 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
18169 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
18170 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
18171 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
18172 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
18173 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
18174 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
18175 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
18176 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
18177 /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
18178 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
18179 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
18180 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
18181 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
18182 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
18183 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
18184 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
18185 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
18186 /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
18187 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
18188 /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
18189 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
18190 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
18191 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
18192 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
18193 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
18194 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
18195 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
18196 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
18197 /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
18198 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
18199 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
18200 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
18201};
18202
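/*
 * The decoder fetches the first opcode byte and dispatches through this
 * table; everything above hangs off it.  Roughly (the real caller elsewhere
 * in IEM also handles logging and statistics):
 *
 *      uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
 *      return FNIEMOP_CALL(g_apfnOneByteMap[b]);
 */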
18203
18204/** @} */
18205
18206#ifdef _MSC_VER
18207# pragma warning(pop)
18208#endif